[ 558.379049] env[69994]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=69994) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 558.379393] env[69994]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=69994) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 558.379557] env[69994]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=69994) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 558.379825] env[69994]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 558.478078] env[69994]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=69994) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 558.488042] env[69994]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=69994) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 558.530601] env[69994]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 559.091192] env[69994]: INFO nova.virt.driver [None req-83d2b7a0-71de-4c09-bd9e-bf50f84ac5df None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 559.161765] env[69994]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 559.161936] env[69994]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 559.162055] env[69994]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=69994) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 562.082666] env[69994]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-77f4bc6a-21fc-407f-852f-4301e198adf9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 562.099303] env[69994]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=69994) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 562.099491] env[69994]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-2d6661b7-0f64-4c09-8f01-ee4eb92b146e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 562.130155] env[69994]: INFO oslo_vmware.api [-] Successfully established new session; session ID is ed56f.
[ 562.130332] env[69994]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.968s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 562.130844] env[69994]: INFO nova.virt.vmwareapi.driver [None req-83d2b7a0-71de-4c09-bd9e-bf50f84ac5df None None] VMware vCenter version: 7.0.3
[ 562.134292] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4563a6-2ea8-4e7f-91bf-6811135bdc85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 562.151535] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf7df2d-5413-45b2-a3f3-dbdfb5769a93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 562.157296] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faadc851-b982-4605-8a05-b21a4258b2f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 562.163662] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004e7ee3-d914-4965-b8ba-9065d4f4cb37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 562.176417] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff86ea7-0a3e-494d-b577-5477a00eef33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 562.182161] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2ccc14-5e33-4239-96a8-d8c217b1762c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 562.213417] env[69994]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-3a84a4a6-91c8-4753-b375-956efc3db3c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 562.218260] env[69994]: DEBUG nova.virt.vmwareapi.driver [None req-83d2b7a0-71de-4c09-bd9e-bf50f84ac5df None None] Extension org.openstack.compute already exists. {{(pid=69994) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 562.220846] env[69994]: INFO nova.compute.provider_config [None req-83d2b7a0-71de-4c09-bd9e-bf50f84ac5df None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 562.724390] env[69994]: DEBUG nova.context [None req-83d2b7a0-71de-4c09-bd9e-bf50f84ac5df None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),8b78287e-5c43-41b1-aa8f-b81f8acc4220(cell1) {{(pid=69994) load_cells /opt/stack/nova/nova/context.py:464}}
[ 562.726498] env[69994]: DEBUG oslo_concurrency.lockutils [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 562.726733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 562.727458] env[69994]: DEBUG oslo_concurrency.lockutils [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 562.727887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Acquiring lock "8b78287e-5c43-41b1-aa8f-b81f8acc4220" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 562.728084] env[69994]: DEBUG oslo_concurrency.lockutils [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Lock "8b78287e-5c43-41b1-aa8f-b81f8acc4220" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 562.729091] env[69994]: DEBUG oslo_concurrency.lockutils [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Lock "8b78287e-5c43-41b1-aa8f-b81f8acc4220" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 562.750285] env[69994]: INFO dbcounter [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Registered counter for database nova_cell0
[ 562.761057] env[69994]: INFO dbcounter [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Registered counter for database nova_cell1
[ 563.286743] env[69994]: DEBUG oslo_db.sqlalchemy.engines [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69994) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 563.287146] env[69994]: DEBUG oslo_db.sqlalchemy.engines [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69994) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 563.292382] env[69994]: ERROR nova.db.main.api [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 563.292382] env[69994]: result = function(*args, **kwargs)
[ 563.292382] env[69994]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 563.292382] env[69994]: return func(*args, **kwargs)
[ 563.292382] env[69994]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 563.292382] env[69994]: result = fn(*args, **kwargs)
[ 563.292382] env[69994]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 563.292382] env[69994]: return f(*args, **kwargs)
[ 563.292382] env[69994]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 563.292382] env[69994]: return db.service_get_minimum_version(context, binaries)
[ 563.292382] env[69994]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 563.292382] env[69994]: _check_db_access()
[ 563.292382] env[69994]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 563.292382] env[69994]: stacktrace = ''.join(traceback.format_stack())
[ 563.292382] env[69994]: 
[ 563.293188] env[69994]: ERROR nova.db.main.api [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 563.293188] env[69994]: result = function(*args, **kwargs)
[ 563.293188] env[69994]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 563.293188] env[69994]: return func(*args, **kwargs)
[ 563.293188] env[69994]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 563.293188] env[69994]: result = fn(*args, **kwargs)
[ 563.293188] env[69994]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 563.293188] env[69994]: return f(*args, **kwargs)
[ 563.293188] env[69994]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 563.293188] env[69994]: return db.service_get_minimum_version(context, binaries)
[ 563.293188] env[69994]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 563.293188] env[69994]: _check_db_access()
[ 563.293188] env[69994]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 563.293188] env[69994]: stacktrace = ''.join(traceback.format_stack())
[ 563.293188] env[69994]: 
[ 563.293570] env[69994]: WARNING nova.objects.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Failed to get minimum service version for cell 8b78287e-5c43-41b1-aa8f-b81f8acc4220
[ 563.293752] env[69994]: WARNING nova.objects.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 563.294253] env[69994]: DEBUG oslo_concurrency.lockutils [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Acquiring lock "singleton_lock" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 563.294410] env[69994]: DEBUG oslo_concurrency.lockutils [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Acquired lock "singleton_lock" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [
563.294649] env[69994]: DEBUG oslo_concurrency.lockutils [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Releasing lock "singleton_lock" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 563.294979] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Full set of CONF: {{(pid=69994) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 563.295137] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ******************************************************************************** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 563.295265] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] Configuration options gathered from: {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 563.295400] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 563.295596] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 563.295726] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ================================================================================ {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 563.295936] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] allow_resize_to_same_host = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.296117] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] arq_binding_timeout = 300 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.296248] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] backdoor_port = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.296371] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] backdoor_socket = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.296533] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] block_device_allocate_retries = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.296696] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] block_device_allocate_retries_interval = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.296865] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cert = self.pem {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.297038] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.297215] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute_monitors = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.297384] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] config_dir = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.297809] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] config_drive_format = iso9660 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.297959] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.298154] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] config_source = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.298330] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] console_host = devstack {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.298497] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] control_exchange = nova {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.298660] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cpu_allocation_ratio = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.298819] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] daemon = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.298987] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] debug = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.299158] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] default_access_ip_network_name = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.299323] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] default_availability_zone = nova {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.299479] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] default_ephemeral_format = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.299637] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] default_green_pool_size = 1000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.299876] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.300048] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] default_schedule_zone = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.300207] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] disk_allocation_ratio = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.300369] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] enable_new_services = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.300575] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] enabled_apis = ['osapi_compute'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.300759] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] enabled_ssl_apis = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.300925] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] flat_injected = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.301097] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] force_config_drive = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.301267] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] force_raw_images = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.301434] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] graceful_shutdown_timeout = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.301596] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] heal_instance_info_cache_interval = -1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.301825] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] host = cpu-1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.302018] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] initial_cpu_allocation_ratio = 4.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.302190] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] initial_disk_allocation_ratio = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.302349] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] initial_ram_allocation_ratio = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.302599] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.302769] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] instance_build_timeout = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.302929] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] instance_delete_interval = 300 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.303108] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] instance_format = [instance: %(uuid)s] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.303278] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] instance_name_template = instance-%08x {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.303441] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] instance_usage_audit = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.303635] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] instance_usage_audit_period = month {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.303805] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.303970] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] instances_path = /opt/stack/data/nova/instances {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.304148] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] internal_service_availability_zone = internal {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.304305] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] key = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.304463] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] live_migration_retry_count = 30 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.304628] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] log_color = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.304799] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] log_config_append = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.304955] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.305122] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] log_dir = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.305277] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] log_file = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.305404] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] log_options = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.305562] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] log_rotate_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.305738] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] log_rotate_interval_type = days {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.305899] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] log_rotation_type = none {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.306046] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.306178] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.306348] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.306512] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.306638] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.306803] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] long_rpc_timeout = 1800 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.306952] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] max_concurrent_builds = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.307121] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] max_concurrent_live_migrations = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.307277] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] max_concurrent_snapshots = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.307429] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] max_local_block_devices = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.307585] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] max_logfile_count = 30 {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.307739] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] max_logfile_size_mb = 200 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.307892] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] maximum_instance_delete_attempts = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.308066] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] metadata_listen = 0.0.0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.308233] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] metadata_listen_port = 8775 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.308399] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] metadata_workers = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.308555] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] migrate_max_retries = -1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.308720] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] mkisofs_cmd = genisoimage {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.308926] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] my_block_storage_ip = 10.180.1.21 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.309068] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] my_ip = 10.180.1.21 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.309277] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.309437] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] network_allocate_retries = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.309612] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.309778] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] osapi_compute_listen = 0.0.0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.309938] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] osapi_compute_listen_port = 8774 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.310114] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] osapi_compute_unique_server_name_scope = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.310282] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] osapi_compute_workers = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.310442] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] password_length = 12 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.310629] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] periodic_enable = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.310795] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] periodic_fuzzy_delay = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.310961] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] pointer_model = usbtablet {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.311137] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] preallocate_images = none {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.311297] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] publish_errors = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.311423] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] pybasedir = /opt/stack/nova {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.311576] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ram_allocation_ratio = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.311733] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] rate_limit_burst = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.311892] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] rate_limit_except_level = CRITICAL {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.312057] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] rate_limit_interval = 0 {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.312213] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] reboot_timeout = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.312366] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] reclaim_instance_interval = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.312557] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] record = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.312749] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] reimage_timeout_per_gb = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.312918] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] report_interval = 120 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.313089] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] rescue_timeout = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.313265] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] reserved_host_cpus = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.313486] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] reserved_host_disk_mb = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.313661] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] reserved_host_memory_mb = 512 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.313823] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] reserved_huge_pages = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.313978] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] resize_confirm_window = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.314154] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] resize_fs_using_block_device = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.314311] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] resume_guests_state_on_host_boot = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.314477] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.314639] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] rpc_response_timeout = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.314820] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] run_external_periodic_tasks = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.314958] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] running_deleted_instance_action = reap {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.315130] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] running_deleted_instance_poll_interval = 1800 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.315288] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] running_deleted_instance_timeout = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.315440] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] scheduler_instance_sync_interval = 120 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.315604] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] service_down_time = 720 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.315772] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] servicegroup_driver = db {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.315921] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] shell_completion = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.316088] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] shelved_offload_time = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.316246] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] shelved_poll_interval = 3600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.316407] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] shutdown_timeout = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.316567] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] source_is_ipv6 = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.316725] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ssl_only = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.316982] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.317163] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] sync_power_state_interval = 600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.317321] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] sync_power_state_pool_size = 1000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.317487] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] syslog_log_facility = LOG_USER {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.317644] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] tempdir = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.317804] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] timeout_nbd = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.317970] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] transport_url = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.318145] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] update_resources_interval = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.318304] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] use_cow_images = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.318460] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] use_journal = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.318612] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] use_json = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.318768] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] use_rootwrap_daemon = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.318921] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] 
use_stderr = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.319082] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] use_syslog = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.319237] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vcpu_pin_set = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.319402] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plugging_is_fatal = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.319561] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plugging_timeout = 300 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.319725] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] virt_mkfs = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.319882] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] volume_usage_poll_interval = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.320043] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] watch_log_file = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.320211] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] web = /usr/share/spice-html5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 563.320395] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.320590] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.320754] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_brick.wait_mpath_device_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.320923] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_concurrency.disable_process_locking = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.321250] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.321435] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.321603] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.321772] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_metrics.metrics_process_name = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.321939] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.322126] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.322320] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.auth_strategy = keystone {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.322516] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.compute_link_prefix = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.322696] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.322875] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.dhcp_domain = novalocal {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.323067] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.enable_instance_password = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.323239] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.glance_link_prefix = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.323404] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.323604] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.instance_list_cells_batch_strategy = distributed {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.323774] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.instance_list_per_project_cells = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.323933] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.list_records_by_skipping_down_cells = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.324110] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.local_metadata_per_cell = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.324282] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.max_limit = 1000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.324449] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.metadata_cache_expiration = 15 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.324649] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.neutron_default_tenant_id = default {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.324846] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.response_validation = warn {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.325034] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.use_neutron_default_nets = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.325213] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.325376] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.vendordata_dynamic_failure_fatal = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.325545] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.325719] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.vendordata_dynamic_ssl_certfile = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.325889] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.vendordata_dynamic_targets = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.326061] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.vendordata_jsonfile_path = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.326248] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api.vendordata_providers = ['StaticJSON'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.326445] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.backend = dogpile.cache.memcached {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.326615] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.backend_argument = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.326775] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.backend_expiration_time = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.326943] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.config_prefix = cache.oslo {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.328491] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.dead_timeout = 60.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.328690] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.debug_cache_backend = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.328866] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.enable_retry_client = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.329051] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.enable_socket_keepalive = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.329235] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.enabled = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.329402] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.enforce_fips_mode = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.329570] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.expiration_time = 600 
{{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.329740] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.hashclient_retry_attempts = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.329907] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.hashclient_retry_delay = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.330081] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.memcache_dead_retry = 300 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.330247] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.memcache_password = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.330415] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.330602] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.330775] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.memcache_pool_maxsize = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.330967] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.memcache_pool_unused_timeout = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.331148] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.memcache_sasl_enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.331331] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.memcache_servers = ['localhost:11211'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.331534] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.memcache_socket_timeout = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.331700] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.memcache_username = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.331873] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.proxies = [] {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.332040] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.redis_db = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.332204] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.redis_password = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.332373] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.redis_sentinel_service_name = mymaster {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.332569] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.332748] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.redis_server = localhost:6379 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.332914] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.redis_socket_timeout = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.333080] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.redis_username = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.333247] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.retry_attempts = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.333407] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.retry_delay = 0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.333601] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.socket_keepalive_count = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.333768] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.socket_keepalive_idle = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.333928] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.socket_keepalive_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.334095] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.tls_allowed_ciphers = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.334254] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.tls_cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.334410] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.tls_certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.334571] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.tls_enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.334730] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cache.tls_keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.334898] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.335084] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.auth_type = password {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.335248] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.335424] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.catalog_info = volumev3::publicURL {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.335583] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.335748] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.335906] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.cross_az_attach = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.336080] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.debug = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.336245] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.endpoint_template = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.336404] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.http_retries = 3 {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.336568] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.336767] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.336955] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.os_region_name = RegionOne {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.337137] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.337302] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cinder.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.337474] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.337637] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute.cpu_dedicated_set = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.337796] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute.cpu_shared_set = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.337959] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute.image_type_exclude_list = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.338138] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute.live_migration_wait_for_vif_plug = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.338303] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute.max_concurrent_disk_ops = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.338463] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute.max_disk_devices_to_attach = -1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.338626] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.338798] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.338957] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute.resource_provider_association_refresh = 300 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.339132] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.339296] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute.shutdown_retry_interval = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.339475] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.339656] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] conductor.workers = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.339864] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] console.allowed_origins = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.340092] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] console.ssl_ciphers = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.340279] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] console.ssl_minimum_version = default {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.340455] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] consoleauth.enforce_session_timeout = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.340652] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] consoleauth.token_ttl = 600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.340831] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.340988] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.certfile = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.341168] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.341328] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.connect_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.341485] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.341647] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.341808] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.341961] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.342135] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.342292] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.342447] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.342628] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.342791] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.342960] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.service_type = accelerator {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.343135] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.343297] env[69994]: DEBUG oslo_service.backend.eventlet.service 
[None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.343456] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.343608] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.343787] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.343945] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] cyborg.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.344127] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.asyncio_connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.344288] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.asyncio_slave_connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.344459] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.backend = sqlalchemy {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.344628] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.344794] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.connection_debug = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.344962] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.connection_parameters = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.345137] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.connection_recycle_time = 3600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.345300] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.connection_trace = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.345463] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.db_inc_retry_interval = 
True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.345624] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.db_max_retries = 20 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.345787] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.db_max_retry_interval = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.345948] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.db_retry_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.346122] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.max_overflow = 50 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.346283] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.max_pool_size = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.346443] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.max_retries = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.346614] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.mysql_sql_mode = TRADITIONAL {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.346777] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.mysql_wsrep_sync_wait = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.346928] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.pool_timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.347099] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.retry_interval = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.347258] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.slave_connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.347419] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.sqlite_synchronous = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.347577] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] database.use_db_reconnect = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
563.347743] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.asyncio_connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.347902] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.asyncio_slave_connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.348083] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.backend = sqlalchemy {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.348255] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.348417] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.connection_debug = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.348587] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.connection_parameters = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.348771] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.connection_recycle_time = 3600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.348956] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.connection_trace = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.349133] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.db_inc_retry_interval = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.349300] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.db_max_retries = 20 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.349463] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.db_max_retry_interval = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.349624] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.db_retry_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.349787] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.max_overflow = 50 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.349948] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.max_pool_size = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.350119] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.max_retries = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.350289] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.350447] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.mysql_wsrep_sync_wait = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.350602] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.pool_timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.350764] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.retry_interval = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.350920] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.slave_connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.351092] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] api_database.sqlite_synchronous = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.351272] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] devices.enabled_mdev_types = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.351450] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.351619] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ephemeral_storage_encryption.default_format = luks {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.351783] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ephemeral_storage_encryption.enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.351946] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ephemeral_storage_encryption.key_size = 512 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.352132] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.api_servers = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.352296] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.352454] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.352646] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.352808] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.connect_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.352965] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.353141] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.debug = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.353307] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.default_trusted_certificate_ids = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.353471] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.enable_certificate_validation = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.353655] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.enable_rbd_download = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.353824] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.353988] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.354164] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.354319] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.max_version = None {{(pid=69994) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.354472] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.354635] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.num_retries = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.354801] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.rbd_ceph_conf = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.354960] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.rbd_connect_timeout = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.355138] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.rbd_pool = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.355302] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.rbd_user = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.355466] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.355625] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.355778] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.355940] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.service_type = image {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.356112] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.356272] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.356426] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.356578] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.356757] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.356919] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.verify_glance_signatures = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.357084] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] glance.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.357251] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] guestfs.debug = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.357419] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.357583] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.auth_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.357740] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.357891] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.358063] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.358223] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.connect_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.358380] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.358537] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.358700] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.insecure = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.358861] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.359022] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.359181] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.359334] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.359489] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.359645] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.359811] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.service_type = shared-file-system {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.359971] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.share_apply_policy_timeout = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.360144] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.360303] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.360462] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.360657] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.360848] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.361021] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] manila.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.361195] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] mks.enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.361554] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.361746] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] image_cache.manager_interval = 2400 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.361914] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] image_cache.precache_concurrency = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.362097] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] image_cache.remove_unused_base_images = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.362268] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.362434] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.362658] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] image_cache.subdirectory_name = _base {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.362942] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.api_max_retries = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.363234] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.api_retry_interval = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.363507] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.363790] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.auth_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.364091] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.364294] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.364472] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.364644] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.conductor_group = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.364808] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.connect_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.364964] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.365139] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.365304] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.365460] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.365617] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.365775] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.365937] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.peer_list = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.366106] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.366267] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.retriable_status_codes = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.366429] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.serial_console_state_timeout = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.366587] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.366757] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.service_type = baremetal {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.366914] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.shard = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.367089] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.367252] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.367410] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.367566] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.367747] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.367908] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ironic.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.368102] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.368280] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] key_manager.fixed_key = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.368461] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.368621] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.barbican_api_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.368781] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.barbican_endpoint = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.368949] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.barbican_endpoint_type = public {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.369119] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.barbican_region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.369276] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.369433] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.369593] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.369750] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.369918] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.370150] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.number_of_retries = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.370323] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.retry_delay = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.370486] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.send_service_user_token = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.370651] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.370806] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.370967] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.verify_ssl = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.371140] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican.verify_ssl_path = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.371307] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican_service_user.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.371469] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican_service_user.auth_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.371626] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican_service_user.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.371784] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican_service_user.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.371946] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican_service_user.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.372117] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican_service_user.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.372275] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican_service_user.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.372434] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican_service_user.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.372645] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] barbican_service_user.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.372834] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vault.approle_role_id = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.372997] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vault.approle_secret_id = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.373183] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vault.kv_mountpoint = secret {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.373344] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vault.kv_path = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.373510] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vault.kv_version = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.373673] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vault.namespace = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.373830] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vault.root_token_id = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.373986] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vault.ssl_ca_crt_file = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.374171] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vault.timeout = 60.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.374337] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vault.use_ssl = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.374508] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.374677] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.374842] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.374996] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.375169] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.connect_retries = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.375325] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.375480] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.375641] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.375796] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.375949] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.376140] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.376310] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.376474] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.376632] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.376801] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.service_type = identity {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.376963] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.377135] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.377293] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.377446] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.377624] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.377781] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] keystone.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.377968] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.ceph_mount_options = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.378441] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.378631] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.connection_uri = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.378798] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.cpu_mode = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.378966] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.cpu_model_extra_flags = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.379153] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.cpu_models = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.379324] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.cpu_power_governor_high = performance {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.379491] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.cpu_power_governor_low = powersave {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.379654] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.cpu_power_management = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.379823] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.379991] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.device_detach_attempts = 8 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.380169] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.device_detach_timeout = 20 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.380337] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.disk_cachemodes = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.380512] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.disk_prefix = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.380721] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.enabled_perf_events = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.380893] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.file_backed_memory = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.381071] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.gid_maps = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.381234] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.hw_disk_discard = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.381388] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.hw_machine_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.381556] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.images_rbd_ceph_conf = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.381721] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.381883] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.382063] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.images_rbd_glance_store_name = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.382236] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.images_rbd_pool = rbd 
{{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.382405] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.images_type = default {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.382593] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.images_volume_group = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.382762] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.inject_key = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.382926] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.inject_partition = -2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.383101] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.inject_password = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.383266] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.iscsi_iface = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.383427] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.iser_use_multipath = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.383589] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.live_migration_bandwidth = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.383754] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.live_migration_completion_timeout = 800 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.383916] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.live_migration_downtime = 500 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.384089] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.live_migration_downtime_delay = 75 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.384250] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.live_migration_downtime_steps = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.384407] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.live_migration_inbound_addr = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.384568] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.live_migration_permit_auto_converge = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.384771] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.live_migration_permit_post_copy = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.384942] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.live_migration_scheme = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.385129] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.live_migration_timeout_action = abort {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.385295] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.live_migration_tunnelled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.385451] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.live_migration_uri = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.385613] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.live_migration_with_native_tls = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.385772] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.max_queues = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.385933] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.mem_stats_period_seconds = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.386181] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.386344] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.nfs_mount_options = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.386646] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.386825] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.num_aoe_discover_tries = 3 {{(pid=69994) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.386991] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.num_iser_scan_tries = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.387170] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.num_memory_encrypted_guests = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.387334] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.num_nvme_discover_tries = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.387496] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.num_pcie_ports = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.387661] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.num_volume_scan_tries = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.387824] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.pmem_namespaces = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.387984] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.quobyte_client_cfg = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.388298] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.388475] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.rbd_connect_timeout = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.388642] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.388808] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.388966] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.rbd_secret_uuid = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.389137] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.rbd_user = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.389300] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.realtime_scheduler_priority = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.389470] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.remote_filesystem_transport = ssh {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.389630] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.rescue_image_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.389787] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.rescue_kernel_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.389945] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.rescue_ramdisk_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.390126] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.rng_dev_path = /dev/urandom {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.390289] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.rx_queue_size = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.390496] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.smbfs_mount_options = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.390776] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.390955] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.snapshot_compression = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.391132] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.snapshot_image_format = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.391365] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.391537] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.sparse_logical_volumes = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.391701] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.swtpm_enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.391869] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.swtpm_group = tss {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.392045] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.swtpm_user = tss {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.392221] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.sysinfo_serial = unique {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.392381] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.tb_cache_size = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.392569] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.tx_queue_size = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.392740] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.uid_maps = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.392907] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.use_virtio_for_bridges = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.393090] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.virt_type = kvm {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.393265] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.volume_clear = zero {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.393428] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.volume_clear_size = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.393599] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.volume_enforce_multipath = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.393759] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.volume_use_multipath = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.393914] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.vzstorage_cache_path = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.394094] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.394267] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.vzstorage_mount_group = qemu {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.394433] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.vzstorage_mount_opts = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.394602] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.394899] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.395092] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.vzstorage_mount_user = stack {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.395263] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.395436] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.395609] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.auth_type = password {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.395770] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.395930] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.396106] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.396268] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.connect_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.396427] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.396597] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.default_floating_pool = public {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.396792] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.396972] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.extension_sync_interval = 600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.397150] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.http_retries = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.397318] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.397477] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.397637] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.397808] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.metadata_proxy_shared_secret = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.397966] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.398153] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.ovs_bridge = br-int {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.398319] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.physnets = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.398489] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.region_name = RegionOne 
{{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.398652] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.398824] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.service_metadata_proxy = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.398983] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.399168] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.service_type = network {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.399334] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.399491] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.399651] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.399807] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.399986] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.400161] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] neutron.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.400338] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] notifications.bdms_in_notifications = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.400569] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] notifications.default_level = INFO {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.400747] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] notifications.include_share_mapping = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.400924] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] notifications.notification_format = unversioned {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.401101] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] notifications.notify_on_state_change = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.401279] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.401455] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] pci.alias = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.401625] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] pci.device_spec = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.401795] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] pci.report_in_placement = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.401966] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.402153] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.auth_type = password {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.402322] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.auth_url = http://10.180.1.21/identity {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.402503] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.402673] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.402841] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.402999] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.connect_retries = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.403175] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.403335] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.default_domain_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.403498] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.default_domain_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.403666] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.domain_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.403824] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.domain_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.403981] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.404156] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.404315] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.404472] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.404668] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.404874] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.password = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.405051] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.project_domain_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.405227] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.project_domain_name = Default {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.405396] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.project_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.405569] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.project_name = service {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.405745] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.region_name = RegionOne {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.405905] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.406080] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.406254] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.service_type = placement {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.406418] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.406576] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.406735] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.406893] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.system_scope = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.407060] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.407222] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.trust_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.407379] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.user_domain_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.407546] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] 
placement.user_domain_name = Default {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.407706] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.user_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.407876] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.username = nova {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.408066] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.408231] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] placement.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.408415] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.cores = 20 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.408580] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.count_usage_from_placement = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.408796] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.409007] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.injected_file_content_bytes = 10240 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.409191] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.injected_file_path_length = 255 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.409361] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.injected_files = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.409527] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.instances = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.409697] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.key_pairs = 100 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.409862] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.metadata_items = 128 {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.410037] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.ram = 51200 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.410211] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.recheck_quota = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.410390] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.server_group_members = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.410577] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.server_groups = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.410795] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.unified_limits_resource_list = ['servers'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.410970] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] quota.unified_limits_resource_strategy = require {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.411160] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.411326] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.411487] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] scheduler.image_metadata_prefilter = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.411649] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.411810] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] scheduler.max_attempts = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.412012] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] scheduler.max_placement_results = 1000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.412188] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.412351] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] scheduler.query_placement_for_image_type_support = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.412519] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.412693] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] scheduler.workers = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.412909] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.413113] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.413300] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.413493] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.413646] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.413810] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.413975] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.414178] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.414346] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] 
filter_scheduler.host_subset_size = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.414509] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.414670] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.image_properties_default_architecture = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.414853] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.415043] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.image_props_weight_setting = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.415224] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.415389] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.isolated_hosts = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.415591] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.isolated_images = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.415719] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.max_instances_per_host = 50 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.415877] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.416044] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.416209] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.pci_in_placement = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.416368] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.416525] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.416685] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.416841] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.416998] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.417172] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.417334] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.track_instance_changes = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.417505] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.417675] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] metrics.required = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.417838] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] metrics.weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.417998] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] metrics.weight_of_unavailable = -10000.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.418179] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] metrics.weight_setting = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.418521] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.418694] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] serial_console.enabled = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.418946] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] serial_console.port_range = 10000:20000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.419107] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.419346] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.419533] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] serial_console.serialproxy_port = 6083 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.419711] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] service_user.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.419884] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] service_user.auth_type = password {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.420058] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] service_user.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.420222] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] service_user.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.420393] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] service_user.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.420561] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] service_user.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.420723] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] service_user.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.420895] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] service_user.send_service_user_token = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.421069] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] service_user.split_loggers = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.421230] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] service_user.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.421495] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.agent_enabled = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.421568] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.421879] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.422107] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.html5proxy_host = 0.0.0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.422415] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.html5proxy_port = 6082 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.422546] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.image_compression = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.422629] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.jpeg_compression = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.422788] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.playback_compression = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.422954] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.require_secure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.423144] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.server_listen = 127.0.0.1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.423317] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.423598] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.423772] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.streaming_mode = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.423933] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] spice.zlib_compression = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.424113] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] upgrade_levels.baseapi = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.424289] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] upgrade_levels.compute = auto {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.424449] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] upgrade_levels.conductor = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.424606] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] upgrade_levels.scheduler = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.424777] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vendordata_dynamic_auth.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.424937] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vendordata_dynamic_auth.auth_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.425103] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vendordata_dynamic_auth.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.425261] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vendordata_dynamic_auth.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.425422] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vendordata_dynamic_auth.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.425579] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vendordata_dynamic_auth.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.425735] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vendordata_dynamic_auth.keyfile = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.425894] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vendordata_dynamic_auth.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.426059] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vendordata_dynamic_auth.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.426237] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.api_retry_count = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.426398] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.ca_file = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.426568] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.cache_prefix = devstack-image-cache {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.426739] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.cluster_name = testcl1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.426904] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.connection_pool_size = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.427075] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.console_delay_seconds = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.427249] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.datastore_regex = ^datastore.* {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.427470] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.427649] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.host_password = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.427815] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.host_port = 443 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.427985] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.host_username = administrator@vsphere.local {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.428170] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.insecure = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.428332] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.integration_bridge = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.428495] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.maximum_objects = 100 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.428654] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.pbm_default_policy = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.428812] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.pbm_enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.428970] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.pbm_wsdl_location = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.429149] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.429307] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.serial_port_proxy_uri = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.429462] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.serial_port_service_uri = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.429625] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.task_poll_interval = 0.5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.429799] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.use_linked_clone = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.429965] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.vnc_keymap = en-us {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.430145] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.vnc_port = 5900 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.430308] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vmware.vnc_port_total = 10000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.430506] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vnc.auth_schemes = ['none'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.430710] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vnc.enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.431034] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.431275] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.431559] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vnc.novncproxy_port = 6080 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.431876] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vnc.server_listen = 127.0.0.1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.432203] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.432509] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vnc.vencrypt_ca_certs = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.432775] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vnc.vencrypt_client_cert = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.433060] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vnc.vencrypt_client_key = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.433278] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.433479] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.disable_deep_image_inspection = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.433614] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.disable_fallback_pcpu_query = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.433778] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.disable_group_policy_check_upcall = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.433943] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.434118] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.disable_rootwrap = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.434279] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.enable_numa_live_migration = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.434440] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.434595] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.434754] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.handle_virt_lifecycle_events = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.434912] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.libvirt_disable_apic = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.435085] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.never_download_image_if_on_rbd = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.435251] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.435606] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.435777] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.435941] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.436116] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.436282] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.436441] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.436600] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.436766] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.436950] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.437130] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] wsgi.client_socket_timeout = 900 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.437299] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] wsgi.default_pool_size = 1000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.437468] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] wsgi.keep_alive = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.437638] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] wsgi.max_header_line = 16384 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.437801] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] wsgi.secure_proxy_ssl_header = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.437962] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] wsgi.ssl_ca_file = None 
{{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.438136] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] wsgi.ssl_cert_file = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.438298] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] wsgi.ssl_key_file = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.438463] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] wsgi.tcp_keepidle = 600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.438637] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.438807] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] zvm.ca_file = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.438968] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] zvm.cloud_connector_url = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.439303] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.439482] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] zvm.reachable_timeout = 300 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.439656] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.439834] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.440027] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] profiler.connection_string = messaging:// {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.440203] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] profiler.enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.440375] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] 
profiler.es_doc_type = notification {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.440556] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] profiler.es_scroll_size = 10000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.440739] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] profiler.es_scroll_time = 2m {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.440904] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] profiler.filter_error_trace = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.441084] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] profiler.hmac_keys = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.441255] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] profiler.sentinel_service_name = mymaster {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.441421] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] profiler.socket_timeout = 0.1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.441583] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] profiler.trace_requests = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.441746] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] profiler.trace_sqlalchemy = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.441927] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] profiler_jaeger.process_tags = {} {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.442099] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] profiler_jaeger.service_name_prefix = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.442264] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] profiler_otlp.service_name_prefix = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.442426] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] remote_debug.host = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.442611] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] remote_debug.port = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.442795] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.442959] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.443136] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.443299] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.443462] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.443629] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.443784] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.443942] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.444117] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.444291] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.hostname = devstack {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.444452] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.444627] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.444797] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.444967] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.445145] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.445318] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.445484] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.445650] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.445821] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.445985] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.446164] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.446331] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.446497] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.446661] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.446823] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=69994) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.446983] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.447157] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.447319] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.447479] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.447642] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.447802] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.ssl = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.447972] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.448152] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.448316] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.448480] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.448649] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.ssl_version = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.448809] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.448994] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.449175] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_notifications.retry = -1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.449352] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.449524] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_messaging_notifications.transport_url = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.449700] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.449864] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.auth_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.450034] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.450198] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.450365] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.450522] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.connect_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.450682] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.450839] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.endpoint_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.451014] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.endpoint_interface = publicURL {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.451180] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.endpoint_override = 
None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.451338] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.endpoint_region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.451496] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.endpoint_service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.451654] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.endpoint_service_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.451814] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.451972] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.452143] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.452299] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.452454] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.452616] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.452774] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.452933] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.service_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.453106] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.453267] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.status_code_retries = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.453424] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.453641] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.453826] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.valid_interfaces = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.453988] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_limit.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.454172] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_reports.file_event_handler = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.454339] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_reports.file_event_handler_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.454497] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] oslo_reports.log_dir = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.454673] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.454856] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plug_linux_bridge_privileged.group = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.455012] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.455187] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.455355] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.455514] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.455686] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.455844] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plug_ovs_privileged.group = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.456007] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plug_ovs_privileged.helper_command = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.456182] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.456345] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.456504] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] vif_plug_ovs_privileged.user = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.456684] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_linux_bridge.flat_interface = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.456858] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.457040] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.457217] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.457388] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.457560] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.457757] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.458039] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_linux_bridge.vlan_interface = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.458148] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.458326] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_ovs.isolate_vif = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.458496] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.458664] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.458837] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.459024] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_ovs.ovsdb_interface = native {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.459198] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] os_vif_ovs.per_port_bridge = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.459369] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] privsep_osbrick.capabilities = [21] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.459532] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] privsep_osbrick.group = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.459694] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] privsep_osbrick.helper_command = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.459859] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.460033] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] privsep_osbrick.thread_pool_size = 8 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.460200] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] privsep_osbrick.user = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.460377] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.460537] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] nova_sys_admin.group = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.460695] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] nova_sys_admin.helper_command = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.460860] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.461110] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] nova_sys_admin.thread_pool_size = 8 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.461197] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] nova_sys_admin.user = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 563.461316] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-820d72b1-af2c-4d69-8eec-46fa002fae9d None None] ******************************************************************************** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 563.461754] env[69994]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 563.965060] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Getting list of instances from cluster (obj){ [ 563.965060] env[69994]: value = "domain-c8" [ 563.965060] env[69994]: _type = "ClusterComputeResource" [ 563.965060] env[69994]: } {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 563.969033] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d44de55-c99f-4a3d-94ec-ada828f12542 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.975535] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Got total of 0 instances {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 563.976152] env[69994]: WARNING nova.virt.vmwareapi.driver [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 563.976660] env[69994]: INFO nova.virt.node [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Generated node identity 2173cd1f-90eb-4aab-b51d-83c140d1a7be [ 563.976890] env[69994]: INFO nova.virt.node [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Wrote node identity 2173cd1f-90eb-4aab-b51d-83c140d1a7be to /opt/stack/data/n-cpu-1/compute_id [ 564.479743] env[69994]: WARNING nova.compute.manager [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Compute nodes ['2173cd1f-90eb-4aab-b51d-83c140d1a7be'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 565.486227] env[69994]: INFO nova.compute.manager [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 566.492031] env[69994]: WARNING nova.compute.manager [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 566.492422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 566.492514] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 566.492688] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 566.492843] env[69994]: DEBUG nova.compute.resource_tracker [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 566.493774] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b0b551-54e6-4000-a0cc-a9fa67250401 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.502044] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d82bc86-638f-4bd7-a295-86fef9515eb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.516167] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1031f0c9-3c60-4b96-9f41-453addd799b6 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.522411] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d246f0fc-eaf1-4de6-9987-be169275f7f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.550174] env[69994]: DEBUG nova.compute.resource_tracker [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181117MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 566.550319] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 566.550540] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 567.052860] env[69994]: WARNING nova.compute.resource_tracker [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] No compute node record for cpu-1:2173cd1f-90eb-4aab-b51d-83c140d1a7be: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 2173cd1f-90eb-4aab-b51d-83c140d1a7be could not be found. [ 567.557370] env[69994]: INFO nova.compute.resource_tracker [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 2173cd1f-90eb-4aab-b51d-83c140d1a7be [ 569.065540] env[69994]: DEBUG nova.compute.resource_tracker [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 569.065915] env[69994]: DEBUG nova.compute.resource_tracker [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 569.220200] env[69994]: INFO nova.scheduler.client.report [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] [req-c9b06353-5157-4991-8be3-8f60ad46d5f5] Created resource provider record via placement API for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
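The resource view logged above (phys_ram=196590MB, used_ram=512MB, total_vcpus=48, phys_disk 200GB/400GB reported) is what the resource tracker turns into the Placement inventory shown in the records that follow. The short sketch below is illustrative only and is not part of the service log: it restates those figures as a plain dict in the same shape as the inventory payload and applies the usual Placement capacity convention, capacity = (total - reserved) * allocation_ratio. The helper name effective_capacity is invented for this example.

    # Illustrative sketch (not from the log): effective schedulable capacity per
    # resource class, using the totals, reserved amounts and allocation ratios
    # reported for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 above.
    # Assumption: Placement's usual convention capacity = (total - reserved) * allocation_ratio.

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    def effective_capacity(inv):
        """Return schedulable capacity per resource class (hypothetical helper)."""
        return {
            rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()
        }

    print(effective_capacity(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

With allocation_ratio 4.0 on VCPU, the 48 physical cores are exposed as 192 schedulable VCPUs, which is why the instance claims later in this log succeed even as multiple tempest instances build concurrently.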
[ 569.237609] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942125a9-c2ee-45f5-a6ce-1239ae463b19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.245223] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae92adf-eb45-4e0f-8794-98b5a8fa98c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.275461] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f17886-730a-4575-a80b-f74ef897af59 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.283089] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4946f5-9015-4636-9129-6bc4626cbfb5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.296056] env[69994]: DEBUG nova.compute.provider_tree [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 569.831591] env[69994]: DEBUG nova.scheduler.client.report [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 569.831822] env[69994]: DEBUG nova.compute.provider_tree [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 0 to 1 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 569.831960] env[69994]: DEBUG nova.compute.provider_tree [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 569.880529] env[69994]: DEBUG nova.compute.provider_tree [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Updating 
resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 1 to 2 during operation: update_traits {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 570.387853] env[69994]: DEBUG nova.compute.resource_tracker [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 570.387853] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.835s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 570.387853] env[69994]: DEBUG nova.service [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Creating RPC server for service compute {{(pid=69994) start /opt/stack/nova/nova/service.py:186}} [ 570.404026] env[69994]: DEBUG nova.service [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] Join ServiceGroup membership for this service compute {{(pid=69994) start /opt/stack/nova/nova/service.py:203}} [ 570.404026] env[69994]: DEBUG nova.servicegroup.drivers.db [None req-b9dc763f-a61e-4841-8e85-fb71f6d689dc None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=69994) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 585.406898] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 585.910237] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Getting list of instances from cluster (obj){ [ 585.910237] env[69994]: value = "domain-c8" [ 585.910237] env[69994]: _type = "ClusterComputeResource" [ 585.910237] env[69994]: } {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 585.911455] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c5a210-6709-40b3-a9fc-ce04cee7ccf9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.920188] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Got total of 0 instances {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 585.920410] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 585.920723] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Getting list of instances from cluster (obj){ [ 585.920723] env[69994]: value = "domain-c8" [ 585.920723] env[69994]: _type = "ClusterComputeResource" [ 585.920723] env[69994]: } {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 585.921685] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c61d9bd-33ad-4fce-806c-b20fc22fbb22 
{{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.928922] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Got total of 0 instances {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 612.539057] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Acquiring lock "dc548f2f-e6d6-4273-8c24-b4f52842e0d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.539389] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Lock "dc548f2f-e6d6-4273-8c24-b4f52842e0d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.042624] env[69994]: DEBUG nova.compute.manager [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 613.582422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.582644] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.585826] env[69994]: INFO nova.compute.claims [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 614.650373] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquiring lock "1d5b8fb7-eeb0-49da-acdf-53b7741e863e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.650678] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "1d5b8fb7-eeb0-49da-acdf-53b7741e863e" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.678143] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ab44b3-6358-48d5-a5bc-a9be6548dd91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.693208] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347aded0-2a6a-49cd-bb3a-dac38a3fc5fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.731701] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b777ae-aad4-48c9-97ce-71abb89b291e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.740115] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175ecb01-d710-46c3-9be7-e6688ef3ed68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.754939] env[69994]: DEBUG nova.compute.provider_tree [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.131770] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Acquiring lock "317e3366-4aec-4c80-bcf9-df84bc5e9939" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.132498] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Lock "317e3366-4aec-4c80-bcf9-df84bc5e9939" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 615.158825] env[69994]: DEBUG nova.compute.manager [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 615.257711] env[69994]: DEBUG nova.scheduler.client.report [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 615.635092] env[69994]: DEBUG nova.compute.manager [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 615.683241] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.759682] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.759949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 615.762363] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.180s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.762959] env[69994]: DEBUG nova.compute.manager [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 615.771823] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.088s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 615.772833] env[69994]: INFO nova.compute.claims [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 615.812710] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Acquiring lock "1232f601-3339-4fc2-92b2-aa550af90b01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.812710] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Lock "1232f601-3339-4fc2-92b2-aa550af90b01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.062672] env[69994]: DEBUG oslo_concurrency.lockutils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquiring lock "316ab41e-d3c1-4cef-8d63-a138e21d0ea3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.063830] env[69994]: DEBUG oslo_concurrency.lockutils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Lock "316ab41e-d3c1-4cef-8d63-a138e21d0ea3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.180377] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.263775] env[69994]: DEBUG nova.compute.manager [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 616.278270] env[69994]: DEBUG nova.compute.utils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 616.285776] env[69994]: DEBUG nova.compute.manager [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 616.286313] env[69994]: DEBUG nova.network.neutron [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 616.317159] env[69994]: DEBUG nova.compute.manager [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 616.568886] env[69994]: DEBUG nova.compute.manager [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 616.804617] env[69994]: DEBUG nova.compute.manager [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 616.818278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.849644] env[69994]: DEBUG nova.policy [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d8791d6988647dfbddb14dd0f20fdc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8006f13c854149c984ef83431560c2e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 616.888508] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.979948] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef7c08a-405a-4d15-a494-0f60ccc659f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.989288] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b901a20a-d4a0-476b-ad5c-06b40374931a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.030183] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fba51f1-4f94-4ec5-89a9-8e91859a4a15 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.036707] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5738d1e4-18d6-4e91-a4aa-f08ff3d78798 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.052632] env[69994]: DEBUG nova.compute.provider_tree [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.096290] env[69994]: DEBUG oslo_concurrency.lockutils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.406886] 
env[69994]: DEBUG oslo_concurrency.lockutils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Acquiring lock "48f6ebca-d7fe-4086-80f4-0b89789dcddb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.407171] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Lock "48f6ebca-d7fe-4086-80f4-0b89789dcddb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 617.555747] env[69994]: DEBUG nova.scheduler.client.report [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 617.632210] env[69994]: DEBUG nova.network.neutron [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Successfully created port: 3f5f9119-ace0-4dc6-85ae-35541cd46022 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 617.826695] env[69994]: DEBUG nova.compute.manager [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 617.865733] env[69994]: DEBUG nova.virt.hardware [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 617.865733] env[69994]: DEBUG nova.virt.hardware [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 617.865733] env[69994]: DEBUG nova.virt.hardware [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 617.865895] env[69994]: DEBUG nova.virt.hardware [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 617.866140] env[69994]: DEBUG nova.virt.hardware [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 617.866532] env[69994]: DEBUG nova.virt.hardware [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 617.866854] env[69994]: DEBUG nova.virt.hardware [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 617.867197] env[69994]: DEBUG nova.virt.hardware [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 617.869890] env[69994]: DEBUG nova.virt.hardware [None 
req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 617.869890] env[69994]: DEBUG nova.virt.hardware [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 617.869890] env[69994]: DEBUG nova.virt.hardware [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 617.869890] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed693769-e652-472f-9296-bbcc40c2fefa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.879880] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81a5300-ba21-46ed-abd9-efa4d6ab9b49 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.906268] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae49a60-864b-41da-8bd6-91e852b61fce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.918017] env[69994]: DEBUG nova.compute.manager [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 618.064021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.291s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.064021] env[69994]: DEBUG nova.compute.manager [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 618.066554] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.886s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.072377] env[69994]: INFO nova.compute.claims [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.446630] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.541330] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.541432] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.541663] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.541815] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.541984] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.545299] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.546040] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.546040] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 618.546040] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.580455] env[69994]: DEBUG nova.compute.utils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 618.585776] env[69994]: DEBUG nova.compute.manager [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 618.585776] env[69994]: DEBUG nova.network.neutron [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 618.694518] env[69994]: DEBUG nova.policy [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2926ede2b398473b9a6d51e53912f26e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '569fef1e170f4ca4b91dda2282e58d79', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 619.049534] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.085756] env[69994]: DEBUG nova.compute.manager [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 619.199808] env[69994]: DEBUG nova.network.neutron [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Successfully created port: c87f9440-73bd-4854-863b-5e6a47bb7faf {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 619.230916] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43bdd2ff-305c-412b-8f3f-f23f23707c6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.242023] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8e5719-6392-4cbe-90d0-ab659e26146b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.281323] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778e330b-cf15-4324-8938-1d3977fdb532 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.291867] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8b67e8-070f-4f1b-aa6e-39f91ee00182 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.310979] env[69994]: DEBUG nova.compute.provider_tree [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.816156] env[69994]: DEBUG nova.scheduler.client.report [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 620.103792] env[69994]: DEBUG nova.compute.manager [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 620.146791] env[69994]: DEBUG nova.virt.hardware [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 620.147058] env[69994]: DEBUG nova.virt.hardware [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 620.147666] env[69994]: DEBUG nova.virt.hardware [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 620.147666] env[69994]: DEBUG nova.virt.hardware [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 620.147666] env[69994]: DEBUG nova.virt.hardware [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 620.147765] env[69994]: DEBUG nova.virt.hardware [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 620.147989] env[69994]: DEBUG nova.virt.hardware [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 620.148062] env[69994]: DEBUG nova.virt.hardware [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
620.148207] env[69994]: DEBUG nova.virt.hardware [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 620.148362] env[69994]: DEBUG nova.virt.hardware [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 620.148528] env[69994]: DEBUG nova.virt.hardware [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 620.149505] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b18b2e8-c90a-41b8-acb6-32d344220492 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.158477] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c14942-2c03-43dc-a47e-aa3c2253ab71 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.262410] env[69994]: DEBUG nova.network.neutron [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Successfully updated port: 3f5f9119-ace0-4dc6-85ae-35541cd46022 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 620.321262] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 620.321945] env[69994]: DEBUG nova.compute.manager [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 620.325166] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.510s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 620.332647] env[69994]: INFO nova.compute.claims [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 620.385692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "298a4d59-733f-4cda-a9c2-80dc21be91ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 620.386082] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "298a4d59-733f-4cda-a9c2-80dc21be91ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 620.773669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Acquiring lock "refresh_cache-dc548f2f-e6d6-4273-8c24-b4f52842e0d2" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.773669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Acquired lock "refresh_cache-dc548f2f-e6d6-4273-8c24-b4f52842e0d2" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.773669] env[69994]: DEBUG nova.network.neutron [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 620.836896] env[69994]: DEBUG nova.compute.utils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 620.840725] env[69994]: DEBUG nova.compute.manager [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 620.840854] env[69994]: DEBUG nova.network.neutron [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 620.892923] env[69994]: DEBUG nova.compute.manager [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 620.965362] env[69994]: DEBUG nova.policy [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '259d2abfb6424c7c9f774a05c1ec97d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90ef47b60ab64229981b7010c4f5cd46', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 621.195522] env[69994]: DEBUG nova.network.neutron [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Successfully updated port: c87f9440-73bd-4854-863b-5e6a47bb7faf {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 621.341281] env[69994]: DEBUG nova.compute.manager [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 621.422071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.505263] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c424eb8e-6682-4931-a43c-706aa24f6ad0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.516237] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e31321-4f32-4a22-8a94-887313767eb2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.550270] env[69994]: DEBUG nova.network.neutron [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.552732] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16be587b-8124-4381-b0ca-94629e9ab491 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.560646] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0510c4-97e7-42f6-b5c2-4a5c3644bd79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.576820] env[69994]: DEBUG nova.compute.provider_tree [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.700833] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquiring lock "refresh_cache-1d5b8fb7-eeb0-49da-acdf-53b7741e863e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.702921] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquired lock "refresh_cache-1d5b8fb7-eeb0-49da-acdf-53b7741e863e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.702921] env[69994]: DEBUG nova.network.neutron [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 622.015019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Acquiring lock "7ea91d3b-1e43-45cd-9bff-e144c63177c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.015907] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Lock "7ea91d3b-1e43-45cd-9bff-e144c63177c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.036396] env[69994]: DEBUG nova.network.neutron [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Updating instance_info_cache with network_info: [{"id": "3f5f9119-ace0-4dc6-85ae-35541cd46022", "address": "fa:16:3e:73:bd:be", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": 
"br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.156", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f5f9119-ac", "ovs_interfaceid": "3f5f9119-ace0-4dc6-85ae-35541cd46022", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.077307] env[69994]: DEBUG nova.scheduler.client.report [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 622.103043] env[69994]: DEBUG nova.network.neutron [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Successfully created port: cc0658e0-6fc9-45af-9d60-534898bf6858 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 622.255901] env[69994]: DEBUG nova.network.neutron [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.360130] env[69994]: DEBUG nova.compute.manager [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 622.395300] env[69994]: DEBUG nova.virt.hardware [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 622.395553] env[69994]: DEBUG nova.virt.hardware [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 622.396187] env[69994]: DEBUG nova.virt.hardware [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 622.396187] env[69994]: DEBUG nova.virt.hardware [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 622.396187] env[69994]: DEBUG nova.virt.hardware [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 622.396187] env[69994]: DEBUG nova.virt.hardware [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 622.396366] env[69994]: DEBUG nova.virt.hardware [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 622.396648] env[69994]: DEBUG nova.virt.hardware [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 622.397488] env[69994]: DEBUG nova.virt.hardware [None 
req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 622.397488] env[69994]: DEBUG nova.virt.hardware [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 622.397488] env[69994]: DEBUG nova.virt.hardware [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 622.399199] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329911be-ee12-47f5-a5cd-1fd3f064cdde {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.409125] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13874caa-58f1-48f1-baf9-4ac3ef896478 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.519731] env[69994]: DEBUG nova.compute.manager [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 622.541351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Releasing lock "refresh_cache-dc548f2f-e6d6-4273-8c24-b4f52842e0d2" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.541351] env[69994]: DEBUG nova.compute.manager [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Instance network_info: |[{"id": "3f5f9119-ace0-4dc6-85ae-35541cd46022", "address": "fa:16:3e:73:bd:be", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.156", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f5f9119-ac", "ovs_interfaceid": "3f5f9119-ace0-4dc6-85ae-35541cd46022", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 622.541534] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:bd:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f5f9119-ace0-4dc6-85ae-35541cd46022', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 622.557327] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 622.558008] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c00be2b1-d3a1-4cc3-ae17-30b171fd5ecf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.566525] env[69994]: DEBUG nova.network.neutron [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Updating instance_info_cache with network_info: [{"id": "c87f9440-73bd-4854-863b-5e6a47bb7faf", "address": "fa:16:3e:31:7f:43", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc87f9440-73", "ovs_interfaceid": "c87f9440-73bd-4854-863b-5e6a47bb7faf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.572637] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Created folder: OpenStack in parent group-v4. [ 622.572836] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Creating folder: Project (8006f13c854149c984ef83431560c2e3). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 622.573567] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5747476f-09c5-44bf-ae62-84118bff035c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.584894] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Created folder: Project (8006f13c854149c984ef83431560c2e3) in parent group-v587342. [ 622.585150] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Creating folder: Instances. Parent ref: group-v587343. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 622.588415] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.263s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.590034] env[69994]: DEBUG nova.compute.manager [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 622.595035] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b99009d-4635-4fdc-b3bb-90490a56fb22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.597119] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.709s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.598427] env[69994]: INFO nova.compute.claims [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 622.609128] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Created folder: Instances in parent group-v587343. [ 622.609389] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 622.609576] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 622.610112] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5ce9388-01d5-4ce7-951e-3bd69c5e6b51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.636117] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 622.636117] env[69994]: value = "task-2924911" [ 622.636117] env[69994]: _type = "Task" [ 622.636117] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.645637] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924911, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.989343] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "627f89ad-0381-4de9-a429-c74e26975ce9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.989717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "627f89ad-0381-4de9-a429-c74e26975ce9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.041193] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.049063] env[69994]: DEBUG nova.compute.manager [req-abf7d9b7-1126-4447-acbe-605f4a28a576 req-a88951ca-a99d-4867-8255-db2b81536522 service nova] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Received event network-vif-plugged-3f5f9119-ace0-4dc6-85ae-35541cd46022 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 623.049618] env[69994]: DEBUG oslo_concurrency.lockutils [req-abf7d9b7-1126-4447-acbe-605f4a28a576 req-a88951ca-a99d-4867-8255-db2b81536522 service nova] Acquiring lock "dc548f2f-e6d6-4273-8c24-b4f52842e0d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.049618] env[69994]: DEBUG oslo_concurrency.lockutils [req-abf7d9b7-1126-4447-acbe-605f4a28a576 req-a88951ca-a99d-4867-8255-db2b81536522 service nova] Lock "dc548f2f-e6d6-4273-8c24-b4f52842e0d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.049618] env[69994]: DEBUG oslo_concurrency.lockutils [req-abf7d9b7-1126-4447-acbe-605f4a28a576 req-a88951ca-a99d-4867-8255-db2b81536522 service nova] Lock "dc548f2f-e6d6-4273-8c24-b4f52842e0d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.049855] env[69994]: DEBUG nova.compute.manager [req-abf7d9b7-1126-4447-acbe-605f4a28a576 req-a88951ca-a99d-4867-8255-db2b81536522 service nova] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] No waiting events found dispatching network-vif-plugged-3f5f9119-ace0-4dc6-85ae-35541cd46022 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 623.049978] env[69994]: WARNING nova.compute.manager [req-abf7d9b7-1126-4447-acbe-605f4a28a576 req-a88951ca-a99d-4867-8255-db2b81536522 service nova] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Received unexpected event network-vif-plugged-3f5f9119-ace0-4dc6-85ae-35541cd46022 for instance with vm_state building and task_state spawning. [ 623.069622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Releasing lock "refresh_cache-1d5b8fb7-eeb0-49da-acdf-53b7741e863e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.069945] env[69994]: DEBUG nova.compute.manager [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Instance network_info: |[{"id": "c87f9440-73bd-4854-863b-5e6a47bb7faf", "address": "fa:16:3e:31:7f:43", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc87f9440-73", "ovs_interfaceid": "c87f9440-73bd-4854-863b-5e6a47bb7faf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 623.070379] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:7f:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c87f9440-73bd-4854-863b-5e6a47bb7faf', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 623.080325] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Creating folder: Project (569fef1e170f4ca4b91dda2282e58d79). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 623.081150] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dddfd26f-35ca-4854-a9ee-79cce0f798fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.093663] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Created folder: Project (569fef1e170f4ca4b91dda2282e58d79) in parent group-v587342. [ 623.094147] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Creating folder: Instances. Parent ref: group-v587346. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 623.094147] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9182c020-96d3-412d-a471-40d1631dafd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.098484] env[69994]: DEBUG nova.compute.utils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 623.099882] env[69994]: DEBUG nova.compute.manager [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 623.100059] env[69994]: DEBUG nova.network.neutron [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 623.108293] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Created folder: Instances in parent group-v587346. [ 623.109250] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
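
Each "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" line corresponds to a Folder.CreateVM_Task being issued and then polled until the "progress is N%" lines report completion. A hedged sketch of that generic oslo.vmware task pattern, assuming 'session' is an established VMwareAPISession and that the folder, config spec, resource pool and host references have been built elsewhere:

    # Sketch only: start a vCenter task and block until it finishes.
    def create_vm(session, folder_ref, config_spec, respool_ref, host_ref):
        task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                      config=config_spec, pool=respool_ref, host=host_ref)
        # wait_for_task polls the task (the "progress is N%" lines above) and raises on
        # error; on success the task result carries the new VM's managed object reference.
        task_info = session.wait_for_task(task_ref)
        return task_info.result
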
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 623.110701] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 623.111105] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5c14ce9-7eff-46a7-884f-75c6c0135ca8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.158150] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 623.158150] env[69994]: value = "task-2924914" [ 623.158150] env[69994]: _type = "Task" [ 623.158150] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.168639] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924911, 'name': CreateVM_Task, 'duration_secs': 0.373604} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.169031] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 623.175757] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924914, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.187576] env[69994]: DEBUG oslo_vmware.service [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcd5043-e2a1-4478-a52b-a7a5fb1e4ea5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.194170] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.194335] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.195692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 623.195692] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e0d3160-3f9e-4b31-bcd5-a42ee524cea4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.201044] env[69994]: DEBUG 
oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 623.201044] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526aff65-ae55-5da1-8961-8e75e8f4002e" [ 623.201044] env[69994]: _type = "Task" [ 623.201044] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.209867] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526aff65-ae55-5da1-8961-8e75e8f4002e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.211343] env[69994]: DEBUG nova.policy [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc0799f063d84f6aa0953ecb32f106ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5642969c42ae403cbfb4d5989e399f8d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 623.433486] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Acquiring lock "443382a8-64af-4f13-b7ab-11234fb13fcf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.433805] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Lock "443382a8-64af-4f13-b7ab-11234fb13fcf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.495239] env[69994]: DEBUG nova.compute.manager [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 623.605307] env[69994]: DEBUG nova.compute.manager [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 623.671312] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924914, 'name': CreateVM_Task, 'duration_secs': 0.494645} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.671454] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 623.673036] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.712991] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.713545] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 623.713926] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.714227] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.715155] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 623.715155] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.715528] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 623.715806] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac9f6312-f247-41dd-92e3-940c283d9f59 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.721184] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6de82707-d7e3-484f-bc11-a931c8bec412 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.726897] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 623.726897] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52328acb-c9bc-925d-d646-2464824c3412" [ 623.726897] env[69994]: _type = "Task" [ 623.726897] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.738318] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52328acb-c9bc-925d-d646-2464824c3412, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.746305] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 623.746611] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 623.747639] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e83aef-2c88-47e7-ba64-db5d6ff9a4e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.755846] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51b985e4-6742-407c-ad1e-bd2d14f42705 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.765420] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 623.765420] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a610f7-f529-7c97-e941-068f5dc04ca5" [ 623.765420] env[69994]: _type = "Task" [ 623.765420] env[69994]: } to complete. 
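
The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines around the devstack-image-cache_base paths come from oslo.concurrency: the per-image cache path is serialized so only one request inspects or populates a given cached image at a time. A small sketch of the two lockutils idioms that produce these lines, assuming in-process (non-external) locks:

    from oslo_concurrency import lockutils

    # The plain context manager emits the "Acquiring/Acquired/Releasing lock" lines
    # (lockutils.py:313/316/334 above); the synchronized decorator emits the
    # "acquired by ... waited / released ... held" lines via its inner wrapper
    # (lockutils.py:405/410/424).
    def refresh_cached_image(image_id):
        with lockutils.lock('[datastore1] devstack-image-cache_base/%s' % image_id):
            pass  # check or populate the image cache while no other request touches it

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # resource-tracker style critical section
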
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.769970] env[69994]: DEBUG nova.network.neutron [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Successfully created port: 6634b7a0-01a3-49e4-a7ac-6f8572d86925 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 623.782569] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a610f7-f529-7c97-e941-068f5dc04ca5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.943043] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12516de-cd39-4cd3-b600-e505d171bd0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.955076] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ebc968-06ef-43bd-b5aa-ca83c135359d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.009077] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fe3450-da52-4593-906e-799463e20587 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.019987] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f5f411-0a2b-4ab2-ad3b-e6de7028c3fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.040370] env[69994]: DEBUG nova.compute.provider_tree [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.043573] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.222256] env[69994]: DEBUG nova.network.neutron [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Successfully updated port: cc0658e0-6fc9-45af-9d60-534898bf6858 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 624.239427] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.239680] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 624.239891] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.277643] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 624.277989] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Creating directory with path [datastore1] vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 624.278429] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5bec4661-b175-4174-9e78-7d8eac78e3c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.313427] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Created directory with path [datastore1] vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 624.313987] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Fetch image to [datastore1] vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 624.314255] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Downloading image file data f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 to [datastore1] vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk on the data store datastore1 {{(pid=69994) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 624.315409] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a6e22fa9-1533-4d4e-8bf9-522b8d495955 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.324988] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12485f2b-e8aa-47d0-8073-ce0e9664319b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.336507] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf23126d-d30d-48fa-a83e-93dbc4666a56 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.374263] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79119a61-b42e-434b-888f-48a6340499c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.380901] env[69994]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ef4a8b5a-72c5-4fd2-b78d-ce14bcb331d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.475724] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Downloading image file data f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 to the data store datastore1 {{(pid=69994) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 624.550883] env[69994]: DEBUG nova.scheduler.client.report [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 624.561271] env[69994]: DEBUG oslo_vmware.rw_handles [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 624.624984] env[69994]: DEBUG nova.compute.manager [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Start spawning the instance on the hypervisor. 
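
The 21318656-byte write connection to the esx7c2n1 folder URL above shows the image data being streamed straight into the datastore's HTTPS file interface rather than through the vCenter SOAP channel. A rough sketch of that flow; the host, cookies and image iterator are placeholders, and the FileWriteHandle argument order/names here are approximate rather than the exact oslo.vmware signature:

    from oslo_vmware import rw_handles

    # Sketch only: stream image bytes into "[datastore1] vmware_temp/.../tmp-sparse.vmdk".
    def upload_to_datastore(host, cookies, rel_path, size, read_iter):
        # Opens the HTTPS write connection seen above; check oslo_vmware/rw_handles.py
        # for the exact constructor signature.
        handle = rw_handles.FileWriteHandle(host, 443, 'ha-datacenter', 'datastore1',
                                            cookies, rel_path, size)
        for chunk in read_iter:      # read_iter: the Glance image data iterator (assumed)
            handle.write(chunk)
        handle.close()               # corresponds to the later "Closing write handle" line
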
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 624.726149] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Acquiring lock "refresh_cache-317e3366-4aec-4c80-bcf9-df84bc5e9939" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.726311] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Acquired lock "refresh_cache-317e3366-4aec-4c80-bcf9-df84bc5e9939" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.726506] env[69994]: DEBUG nova.network.neutron [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 624.768390] env[69994]: DEBUG nova.virt.hardware [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 624.768693] env[69994]: DEBUG nova.virt.hardware [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 624.769213] env[69994]: DEBUG nova.virt.hardware [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 624.769457] env[69994]: DEBUG nova.virt.hardware [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 624.769634] env[69994]: DEBUG nova.virt.hardware [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 624.769809] env[69994]: DEBUG nova.virt.hardware [None 
req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 624.770621] env[69994]: DEBUG nova.virt.hardware [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 624.770621] env[69994]: DEBUG nova.virt.hardware [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 624.770621] env[69994]: DEBUG nova.virt.hardware [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 624.770621] env[69994]: DEBUG nova.virt.hardware [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 624.770867] env[69994]: DEBUG nova.virt.hardware [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 624.772184] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f48b62-cfce-491a-b0fc-8668df11824c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.787714] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d610bd-0620-4084-ae46-859b3194be8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.058169] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.058842] env[69994]: DEBUG nova.compute.manager [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Start building networks asynchronously for instance. 
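
The nova.virt.hardware lines above reduce to a small search: with no flavor or image limits the maxima default to 65536 sockets/cores/threads, and for a 1-vCPU flavor the only factorization is 1 socket x 1 core x 1 thread, hence the single VirtCPUTopology(cores=1,sockets=1,threads=1) result. An illustrative enumeration of that idea (not nova's actual implementation):

    # Sketch only: enumerate sockets*cores*threads factorizations of a vCPU count
    # under per-dimension maxima, mirroring the "Got 1 possible topologies" line above.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # -> [(1, 1, 1)]
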
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 625.062052] env[69994]: DEBUG oslo_concurrency.lockutils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.965s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.064247] env[69994]: INFO nova.compute.claims [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 625.210360] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Acquiring lock "75e952e7-6761-49a4-9193-175f5d30494e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.210614] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Lock "75e952e7-6761-49a4-9193-175f5d30494e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.287043] env[69994]: DEBUG nova.network.neutron [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.493509] env[69994]: DEBUG oslo_vmware.rw_handles [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Completed reading data from the image iterator. {{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 625.493818] env[69994]: DEBUG oslo_vmware.rw_handles [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 625.567652] env[69994]: DEBUG nova.compute.utils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 625.572550] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Downloaded image file data f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 to vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk on the data store datastore1 {{(pid=69994) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 625.574368] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 625.574616] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Copying Virtual Disk [datastore1] vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk to [datastore1] vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 625.575247] env[69994]: DEBUG nova.compute.manager [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 625.575423] env[69994]: DEBUG nova.network.neutron [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 625.580147] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b23f196c-7598-4f29-a49a-107de0c554ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.582924] env[69994]: DEBUG nova.compute.manager [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Start building block device mappings for instance. 
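
"Caching image" above means the freshly downloaded tmp-sparse.vmdk is converted by a CopyVirtualDisk_Task into the flat <image-id>.vmdk that later serves as the cached copy under devstack-image-cache_base/<image-id>/ (the path the image-cache locks earlier in this log protect), so subsequent boots of the same image skip the Glance download. A hedged sketch of issuing that copy; 'session', the VirtualDiskManager reference and the datacenter reference are assumed to be looked up already:

    # Sketch only: copy the downloaded temp disk into its flat per-image vmdk.
    def cache_image_disk(session, disk_mgr_ref, dc_ref, tmp_dir, image_id):
        src = '[datastore1] %s/%s/tmp-sparse.vmdk' % (tmp_dir, image_id)
        dst = '[datastore1] %s/%s/%s.vmdk' % (tmp_dir, image_id, image_id)
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr_ref,
                                  sourceName=src, sourceDatacenter=dc_ref,
                                  destName=dst, destDatacenter=dc_ref)
        # Polled as task-2924915 above: progress 0% ... 100%.
        session.wait_for_task(task)
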
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 625.586202] env[69994]: DEBUG nova.network.neutron [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Updating instance_info_cache with network_info: [{"id": "cc0658e0-6fc9-45af-9d60-534898bf6858", "address": "fa:16:3e:be:63:1e", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.122", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc0658e0-6f", "ovs_interfaceid": "cc0658e0-6fc9-45af-9d60-534898bf6858", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.595700] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 625.595700] env[69994]: value = "task-2924915" [ 625.595700] env[69994]: _type = "Task" [ 625.595700] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.606245] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924915, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.785979] env[69994]: DEBUG nova.policy [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6665d4841614de4b00eec0666b54c65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '227f00137ddc436abf0892d7d9ae73ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 626.103155] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Releasing lock "refresh_cache-317e3366-4aec-4c80-bcf9-df84bc5e9939" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.103155] env[69994]: DEBUG nova.compute.manager [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Instance network_info: |[{"id": "cc0658e0-6fc9-45af-9d60-534898bf6858", "address": "fa:16:3e:be:63:1e", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.122", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc0658e0-6f", "ovs_interfaceid": "cc0658e0-6fc9-45af-9d60-534898bf6858", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 626.113869] env[69994]: DEBUG nova.network.neutron [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Successfully updated port: 6634b7a0-01a3-49e4-a7ac-6f8572d86925 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 626.119306] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:63:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc0658e0-6fc9-45af-9d60-534898bf6858', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 626.130161] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Creating folder: Project (90ef47b60ab64229981b7010c4f5cd46). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.130161] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07bbd78a-360d-45dd-8425-f1dcab06ec7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.137556] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924915, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.141306] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Created folder: Project (90ef47b60ab64229981b7010c4f5cd46) in parent group-v587342. [ 626.141306] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Creating folder: Instances. Parent ref: group-v587349. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.141443] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-814a6c2b-56b2-48ad-9607-29d047f545e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.151464] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Created folder: Instances in parent group-v587349. [ 626.151984] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 626.152070] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 626.152364] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1db27765-62c9-4ea1-831b-728b1c9d3321 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.172406] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 626.172406] env[69994]: value = "task-2924918" [ 626.172406] env[69994]: _type = "Task" [ 626.172406] env[69994]: } to complete. 
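
The "Instance VIF info" entries are a direct projection of the Neutron network_info shown just before them: the port id becomes iface_id, the port MAC becomes mac_address, and the NSX logical-switch id from the port's binding details becomes an OpaqueNetwork network_ref. Roughly, for one entry of the network_info list (field names as they appear in this log; vmxnet3 is hard-coded here only because every VIF in this run uses it):

    # Sketch only: map one Neutron VIF from network_info to the vmwareapi VIF-info dict
    # seen in the "Instance VIF info" log entries.
    def to_vif_info(vif):
        return {
            'network_name': vif['network']['bridge'],                    # e.g. 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',
        }
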
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.180343] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924918, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.378888] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b27583-f143-40ea-bfb3-b29712ca288b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.390343] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58eab323-c270-45fb-aae1-8b3b1626f100 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.431656] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f2a7be-741b-44cf-a0bf-955a28aec3ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.439620] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffea664e-5564-4029-b78f-4dd89e9e5d78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.454234] env[69994]: DEBUG nova.compute.provider_tree [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.606755] env[69994]: DEBUG nova.compute.manager [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 626.616229] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924915, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.633717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.633717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.633717] env[69994]: DEBUG nova.network.neutron [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 626.652139] env[69994]: DEBUG nova.virt.hardware [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 626.652139] env[69994]: DEBUG nova.virt.hardware [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 626.652139] env[69994]: DEBUG nova.virt.hardware [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 626.652287] env[69994]: DEBUG nova.virt.hardware [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 626.652287] env[69994]: DEBUG nova.virt.hardware [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 626.652287] env[69994]: DEBUG nova.virt.hardware [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 626.654769] env[69994]: DEBUG nova.virt.hardware [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 626.654769] env[69994]: DEBUG nova.virt.hardware [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 626.654769] env[69994]: DEBUG nova.virt.hardware [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 626.654769] env[69994]: DEBUG nova.virt.hardware [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 626.654769] env[69994]: DEBUG nova.virt.hardware [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 626.655154] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8cd4ec-ad3c-42d7-a1a1-5ca09c04841c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.664390] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93d9f8b-e1ab-4c5b-98d0-031b0b078e6d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.690742] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924918, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.916168] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Acquiring lock "9e9973e1-feb8-4fd7-95ae-e6d824af5a64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.916414] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Lock "9e9973e1-feb8-4fd7-95ae-e6d824af5a64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.926366] env[69994]: DEBUG nova.network.neutron [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Successfully created port: aa2b9475-c88c-41e6-be6f-249869384580 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 626.957921] env[69994]: DEBUG nova.scheduler.client.report [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 626.964951] env[69994]: DEBUG nova.compute.manager [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Received event network-changed-3f5f9119-ace0-4dc6-85ae-35541cd46022 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 626.965167] env[69994]: DEBUG nova.compute.manager [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Refreshing instance network info cache due to event network-changed-3f5f9119-ace0-4dc6-85ae-35541cd46022. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 626.965560] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Acquiring lock "refresh_cache-dc548f2f-e6d6-4273-8c24-b4f52842e0d2" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.965652] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Acquired lock "refresh_cache-dc548f2f-e6d6-4273-8c24-b4f52842e0d2" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.966244] env[69994]: DEBUG nova.network.neutron [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Refreshing network info cache for port 3f5f9119-ace0-4dc6-85ae-35541cd46022 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 627.111570] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924915, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.257741} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.112013] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Copied Virtual Disk [datastore1] vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk to [datastore1] vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 627.112291] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Deleting the datastore file [datastore1] vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 627.112977] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4751767e-fb89-486c-9d03-f23f5747ed57 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.119589] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 627.119589] env[69994]: value = "task-2924919" [ 627.119589] env[69994]: _type = "Task" [ 627.119589] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.128697] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924919, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.174431] env[69994]: DEBUG nova.network.neutron [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.196024] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924918, 'name': CreateVM_Task, 'duration_secs': 0.559509} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.196024] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 627.196024] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.196024] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.196024] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 627.196024] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4052fd1-c4db-4423-9919-cc2dbd753829 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.202624] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Waiting for the task: (returnval){ [ 627.202624] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521097e6-231d-d4fa-b113-ea84392d1d1d" [ 627.202624] env[69994]: _type = "Task" [ 627.202624] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.212486] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521097e6-231d-d4fa-b113-ea84392d1d1d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.471163] env[69994]: DEBUG oslo_concurrency.lockutils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.471163] env[69994]: DEBUG nova.compute.manager [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 627.475524] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.029s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.477464] env[69994]: INFO nova.compute.claims [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 627.629165] env[69994]: DEBUG nova.network.neutron [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating instance_info_cache with network_info: [{"id": "6634b7a0-01a3-49e4-a7ac-6f8572d86925", "address": "fa:16:3e:dc:4a:90", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6634b7a0-01", "ovs_interfaceid": "6634b7a0-01a3-49e4-a7ac-6f8572d86925", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.633552] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924919, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022674} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.634554] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 627.634877] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Moving file from [datastore1] vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 to [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48. {{(pid=69994) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 627.635263] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-cfacf363-25bf-46b7-a85d-9851aad3a87a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.642701] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 627.642701] env[69994]: value = "task-2924920" [ 627.642701] env[69994]: _type = "Task" [ 627.642701] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.713711] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.713965] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 627.714190] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.976054] env[69994]: DEBUG nova.compute.utils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 627.979424] env[69994]: DEBUG nova.compute.manager [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Not allocating networking since 'none' was specified. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 628.136246] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.136577] env[69994]: DEBUG nova.compute.manager [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Instance network_info: |[{"id": "6634b7a0-01a3-49e4-a7ac-6f8572d86925", "address": "fa:16:3e:dc:4a:90", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6634b7a0-01", "ovs_interfaceid": "6634b7a0-01a3-49e4-a7ac-6f8572d86925", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 628.137027] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:4a:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6634b7a0-01a3-49e4-a7ac-6f8572d86925', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 628.149857] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Creating folder: Project (5642969c42ae403cbfb4d5989e399f8d). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 628.149857] env[69994]: DEBUG nova.network.neutron [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Updated VIF entry in instance network info cache for port 3f5f9119-ace0-4dc6-85ae-35541cd46022. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 628.149989] env[69994]: DEBUG nova.network.neutron [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Updating instance_info_cache with network_info: [{"id": "3f5f9119-ace0-4dc6-85ae-35541cd46022", "address": "fa:16:3e:73:bd:be", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.156", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f5f9119-ac", "ovs_interfaceid": "3f5f9119-ace0-4dc6-85ae-35541cd46022", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.150454] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-112dfdaa-e5f9-4cc4-b614-1204822189c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.163250] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924920, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.084363} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.165407] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] File moved {{(pid=69994) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 628.165633] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Cleaning up location [datastore1] vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 628.166278] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Deleting the datastore file [datastore1] vmware_temp/117065a3-6372-4920-80f5-7cb74cc665cb {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 628.166376] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Created folder: Project (5642969c42ae403cbfb4d5989e399f8d) in parent group-v587342. [ 628.166499] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Creating folder: Instances. Parent ref: group-v587352. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 628.166729] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-402a6097-6938-4449-8f9e-63920ccd1b9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.169033] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f0549d4-166a-42d4-a3ea-c932bcef0e56 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.175454] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 628.175454] env[69994]: value = "task-2924922" [ 628.175454] env[69994]: _type = "Task" [ 628.175454] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.180883] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Created folder: Instances in parent group-v587352. [ 628.181208] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 628.181827] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 628.183109] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c0a08ba-c170-4bcf-9129-775eadd6d11d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.203295] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924922, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.207643] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 628.207643] env[69994]: value = "task-2924924" [ 628.207643] env[69994]: _type = "Task" [ 628.207643] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.219814] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924924, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.482232] env[69994]: DEBUG nova.compute.manager [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 628.652903] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Releasing lock "refresh_cache-dc548f2f-e6d6-4273-8c24-b4f52842e0d2" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.653176] env[69994]: DEBUG nova.compute.manager [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Received event network-vif-plugged-c87f9440-73bd-4854-863b-5e6a47bb7faf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 628.653480] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Acquiring lock "1d5b8fb7-eeb0-49da-acdf-53b7741e863e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.653480] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Lock "1d5b8fb7-eeb0-49da-acdf-53b7741e863e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.653598] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Lock "1d5b8fb7-eeb0-49da-acdf-53b7741e863e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.653762] env[69994]: DEBUG nova.compute.manager [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] No waiting events found dispatching network-vif-plugged-c87f9440-73bd-4854-863b-5e6a47bb7faf {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 628.654022] env[69994]: WARNING nova.compute.manager [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Received unexpected event network-vif-plugged-c87f9440-73bd-4854-863b-5e6a47bb7faf for instance with vm_state building and task_state spawning. [ 628.654186] env[69994]: DEBUG nova.compute.manager [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Received event network-changed-c87f9440-73bd-4854-863b-5e6a47bb7faf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 628.654350] env[69994]: DEBUG nova.compute.manager [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Refreshing instance network info cache due to event network-changed-c87f9440-73bd-4854-863b-5e6a47bb7faf. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 628.654528] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Acquiring lock "refresh_cache-1d5b8fb7-eeb0-49da-acdf-53b7741e863e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.654664] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Acquired lock "refresh_cache-1d5b8fb7-eeb0-49da-acdf-53b7741e863e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.654814] env[69994]: DEBUG nova.network.neutron [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Refreshing network info cache for port c87f9440-73bd-4854-863b-5e6a47bb7faf {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 628.696094] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924922, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024197} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.697298] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 628.698513] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5f01502-4725-4a7e-ba0b-67fa945f9f4c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.706601] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 628.706601] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523f0f05-12d9-dbac-cd7e-48070726bf06" [ 628.706601] env[69994]: _type = "Task" [ 628.706601] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.725938] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523f0f05-12d9-dbac-cd7e-48070726bf06, 'name': SearchDatastore_Task, 'duration_secs': 0.009688} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.733226] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.733226] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] dc548f2f-e6d6-4273-8c24-b4f52842e0d2/dc548f2f-e6d6-4273-8c24-b4f52842e0d2.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 628.733226] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924924, 'name': CreateVM_Task, 'duration_secs': 0.349262} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.736075] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.736258] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 628.736455] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b64cbcdf-db77-4301-a2f5-10ebed4f85de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.738546] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 628.738974] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71d1781c-d5ff-4eef-a6dc-70870edf6b65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.741376] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.741649] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.741957] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 628.743061] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24666c9e-e0f2-41a9-94c2-254adab6d3ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.747474] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 628.747474] env[69994]: value = "task-2924925" [ 628.747474] env[69994]: _type = "Task" [ 628.747474] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.753321] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 628.754184] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 628.756117] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75a04018-70ea-4f2d-a1e7-374b57761cad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.766664] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924925, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.768620] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 628.768620] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521b90f8-d361-d69a-e452-534cbcccc24f" [ 628.768620] env[69994]: _type = "Task" [ 628.768620] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.773294] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 628.773294] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526956a7-5af0-2dd7-83a0-2556e563371c" [ 628.773294] env[69994]: _type = "Task" [ 628.773294] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.780843] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521b90f8-d361-d69a-e452-534cbcccc24f, 'name': SearchDatastore_Task, 'duration_secs': 0.008196} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.781519] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.781893] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 628.782132] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.785545] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526956a7-5af0-2dd7-83a0-2556e563371c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.791999] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a857ffde-2489-4c3a-9201-62f8983b254a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.802520] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5768fce0-3f00-49b0-b4f9-494afeea1948 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.837801] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30cc7333-fb0b-4fd6-9ed6-d1c3e844401e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.847882] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef1d942-424f-4be6-b576-e3fd7dbbd095 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.866047] env[69994]: DEBUG nova.compute.provider_tree [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 629.260066] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924925, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.284774] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526956a7-5af0-2dd7-83a0-2556e563371c, 'name': SearchDatastore_Task, 'duration_secs': 0.01867} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.285813] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9c19a68-7b76-42d5-9f83-17b81d341776 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.292055] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 629.292055] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52da404a-58a0-d8f9-262d-7c224b70af9a" [ 629.292055] env[69994]: _type = "Task" [ 629.292055] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.302242] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52da404a-58a0-d8f9-262d-7c224b70af9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.363946] env[69994]: DEBUG nova.network.neutron [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Successfully updated port: aa2b9475-c88c-41e6-be6f-249869384580 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 629.393523] env[69994]: ERROR nova.scheduler.client.report [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [req-31b5fc30-61c5-45ad-ab97-69227bf33e3b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-31b5fc30-61c5-45ad-ab97-69227bf33e3b"}]} [ 629.420462] env[69994]: DEBUG nova.scheduler.client.report [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 629.438372] env[69994]: DEBUG nova.scheduler.client.report [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 629.441017] env[69994]: DEBUG nova.compute.provider_tree [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 629.453035] env[69994]: DEBUG nova.scheduler.client.report [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 629.471994] env[69994]: DEBUG nova.scheduler.client.report [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 629.495280] env[69994]: DEBUG nova.compute.manager [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 629.541713] env[69994]: DEBUG nova.virt.hardware [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 629.541990] env[69994]: DEBUG nova.virt.hardware [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 629.542773] env[69994]: DEBUG nova.virt.hardware [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 629.542773] env[69994]: DEBUG nova.virt.hardware [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
629.542773] env[69994]: DEBUG nova.virt.hardware [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 629.542773] env[69994]: DEBUG nova.virt.hardware [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 629.543510] env[69994]: DEBUG nova.virt.hardware [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 629.543510] env[69994]: DEBUG nova.virt.hardware [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 629.543510] env[69994]: DEBUG nova.virt.hardware [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 629.544211] env[69994]: DEBUG nova.virt.hardware [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 629.544211] env[69994]: DEBUG nova.virt.hardware [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 629.544621] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e493a459-db34-43fa-99d3-3c05354e3077 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.559736] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ed26df-eed1-4926-a5d5-66ada0cc748d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.575397] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 629.581494] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Creating folder: Project 
(bb59791386d7453d915d6ded7627e81c). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 629.584668] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86a2ea87-e4b1-4935-ba97-50f080b1e38f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.596113] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Created folder: Project (bb59791386d7453d915d6ded7627e81c) in parent group-v587342. [ 629.596113] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Creating folder: Instances. Parent ref: group-v587355. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 629.596113] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a127fea-070f-44d7-ad01-3e62b833a2a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.604979] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Created folder: Instances in parent group-v587355. [ 629.604979] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 629.605145] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 629.605379] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d2dc9a9-4ddf-43f5-8287-6583705aa168 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.631296] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 629.631296] env[69994]: value = "task-2924928" [ 629.631296] env[69994]: _type = "Task" [ 629.631296] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.642563] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924928, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.746583] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f198c3-21be-4cd9-b5a3-796d05288b3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.765807] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d6fa2b-ff04-4b1e-a4b2-140c2cfd795a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.771422] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924925, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.778401} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.775331] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] dc548f2f-e6d6-4273-8c24-b4f52842e0d2/dc548f2f-e6d6-4273-8c24-b4f52842e0d2.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 629.775667] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 629.776358] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d333da0-6771-491d-ad37-6ca0876381ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.812980] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12175700-33ba-4d89-95d5-79353305f56c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.817431] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 629.817431] env[69994]: value = "task-2924929" [ 629.817431] env[69994]: _type = "Task" [ 629.817431] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.828218] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52da404a-58a0-d8f9-262d-7c224b70af9a, 'name': SearchDatastore_Task, 'duration_secs': 0.020275} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.828756] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.829186] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1d5b8fb7-eeb0-49da-acdf-53b7741e863e/1d5b8fb7-eeb0-49da-acdf-53b7741e863e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 629.830574] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33dba060-59c8-4e8c-92c4-50f2601e6cb2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.838269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.838552] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 629.838962] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba3f2d4b-954f-4826-ab3c-1614d6cd267f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.843020] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924929, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.843020] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef4d01f1-8dbf-4410-a0f0-0558ee2fcd03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.854575] env[69994]: DEBUG nova.compute.provider_tree [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 629.859063] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 629.859063] env[69994]: value = "task-2924930" [ 629.859063] env[69994]: _type = "Task" [ 629.859063] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.859418] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 629.859647] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 629.860872] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c2551f3-0b3d-4780-b586-23d561b6875f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.866626] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Acquiring lock "refresh_cache-1232f601-3339-4fc2-92b2-aa550af90b01" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.866952] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Acquired lock "refresh_cache-1232f601-3339-4fc2-92b2-aa550af90b01" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.867107] env[69994]: DEBUG nova.network.neutron [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 629.872148] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Waiting for the task: (returnval){ [ 629.872148] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c371af-cd73-aba7-cdd0-2ec7f05157ad" [ 629.872148] env[69994]: _type = "Task" [ 629.872148] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.876452] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2924930, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.890292] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c371af-cd73-aba7-cdd0-2ec7f05157ad, 'name': SearchDatastore_Task, 'duration_secs': 0.011464} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.891026] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-199fb760-2020-4618-88ef-0b1bafcc9ba7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.897330] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Waiting for the task: (returnval){ [ 629.897330] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5237c129-b859-8a6f-6d8d-3f1b29903d45" [ 629.897330] env[69994]: _type = "Task" [ 629.897330] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.905755] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5237c129-b859-8a6f-6d8d-3f1b29903d45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.000363] env[69994]: DEBUG nova.network.neutron [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Updated VIF entry in instance network info cache for port c87f9440-73bd-4854-863b-5e6a47bb7faf. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 630.001095] env[69994]: DEBUG nova.network.neutron [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Updating instance_info_cache with network_info: [{"id": "c87f9440-73bd-4854-863b-5e6a47bb7faf", "address": "fa:16:3e:31:7f:43", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc87f9440-73", "ovs_interfaceid": "c87f9440-73bd-4854-863b-5e6a47bb7faf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.068129] env[69994]: DEBUG nova.compute.manager [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Received event network-vif-plugged-6634b7a0-01a3-49e4-a7ac-6f8572d86925 {{(pid=69994) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 630.068277] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] Acquiring lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.071435] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] Lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.071435] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] Lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.071435] env[69994]: DEBUG nova.compute.manager [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] No waiting events found dispatching network-vif-plugged-6634b7a0-01a3-49e4-a7ac-6f8572d86925 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 630.071435] env[69994]: WARNING nova.compute.manager [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Received unexpected event network-vif-plugged-6634b7a0-01a3-49e4-a7ac-6f8572d86925 for instance with vm_state building and task_state spawning. [ 630.071435] env[69994]: DEBUG nova.compute.manager [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Received event network-changed-6634b7a0-01a3-49e4-a7ac-6f8572d86925 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 630.072550] env[69994]: DEBUG nova.compute.manager [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Refreshing instance network info cache due to event network-changed-6634b7a0-01a3-49e4-a7ac-6f8572d86925. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 630.072550] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] Acquiring lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.072550] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] Acquired lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.072550] env[69994]: DEBUG nova.network.neutron [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Refreshing network info cache for port 6634b7a0-01a3-49e4-a7ac-6f8572d86925 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 630.152811] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924928, 'name': CreateVM_Task, 'duration_secs': 0.32081} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.153149] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 630.155540] env[69994]: DEBUG oslo_vmware.service [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873dcb41-c453-469e-ae4a-7c33162545d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.162306] env[69994]: DEBUG oslo_concurrency.lockutils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.162488] env[69994]: DEBUG oslo_concurrency.lockutils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.162910] env[69994]: DEBUG oslo_concurrency.lockutils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 630.163345] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24455582-4f5e-4f1d-9ea5-55e207c8fdbd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.168401] env[69994]: DEBUG oslo_vmware.api [None 
req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 630.168401] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a3fe3e-0c3f-4d81-f926-8f1c50dd4666" [ 630.168401] env[69994]: _type = "Task" [ 630.168401] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.178829] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a3fe3e-0c3f-4d81-f926-8f1c50dd4666, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.184326] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.184326] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.332673] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924929, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088281} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.332968] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 630.334032] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181e6f58-dd20-4acd-8f5c-639606d09e7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.363635] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] dc548f2f-e6d6-4273-8c24-b4f52842e0d2/dc548f2f-e6d6-4273-8c24-b4f52842e0d2.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 630.366785] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a54e7ee8-0c38-4e65-95e3-13e332e48f9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.402060] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2924930, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.406872] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 630.406872] env[69994]: value = "task-2924931" [ 630.406872] env[69994]: _type = "Task" [ 630.406872] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.408385] env[69994]: ERROR nova.scheduler.client.report [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [req-a3458aaf-58e4-4ecb-9c22-e833693aaba4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a3458aaf-58e4-4ecb-9c22-e833693aaba4"}]} [ 630.418457] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5237c129-b859-8a6f-6d8d-3f1b29903d45, 'name': SearchDatastore_Task, 'duration_secs': 0.009745} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.419259] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.419585] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 317e3366-4aec-4c80-bcf9-df84bc5e9939/317e3366-4aec-4c80-bcf9-df84bc5e9939.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 630.419886] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.420097] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 630.420366] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5ab4170-1d87-4fea-987e-7e0aa27ffffa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.434148] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92dcb6cc-d561-412a-a67f-5b819fde75bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.436540] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924931, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.438044] env[69994]: DEBUG nova.scheduler.client.report [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 630.445431] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Waiting for the task: (returnval){ [ 630.445431] env[69994]: value = "task-2924932" [ 630.445431] env[69994]: _type = "Task" [ 630.445431] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.455812] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924932, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.457256] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 630.457427] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 630.458325] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-460ae621-06f4-4f6a-929b-4247a8ab7ad9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.462249] env[69994]: DEBUG nova.scheduler.client.report [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 630.462249] env[69994]: DEBUG nova.compute.provider_tree [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 630.467324] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 630.467324] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d250cf-08f5-1352-0a48-e6e83668d779" [ 630.467324] env[69994]: _type = "Task" [ 630.467324] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.476404] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d250cf-08f5-1352-0a48-e6e83668d779, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.477550] env[69994]: DEBUG nova.scheduler.client.report [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 630.479993] env[69994]: DEBUG nova.network.neutron [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.503157] env[69994]: DEBUG nova.scheduler.client.report [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 630.505630] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Releasing lock "refresh_cache-1d5b8fb7-eeb0-49da-acdf-53b7741e863e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.505907] env[69994]: DEBUG nova.compute.manager [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Received event network-vif-plugged-cc0658e0-6fc9-45af-9d60-534898bf6858 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 630.506149] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Acquiring lock "317e3366-4aec-4c80-bcf9-df84bc5e9939-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.506388] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Lock "317e3366-4aec-4c80-bcf9-df84bc5e9939-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.506576] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Lock "317e3366-4aec-4c80-bcf9-df84bc5e9939-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.506778] env[69994]: DEBUG nova.compute.manager [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] No waiting events found dispatching network-vif-plugged-cc0658e0-6fc9-45af-9d60-534898bf6858 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 630.506975] env[69994]: WARNING nova.compute.manager [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Received unexpected event network-vif-plugged-cc0658e0-6fc9-45af-9d60-534898bf6858 for instance with vm_state building and task_state spawning. 
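The two ERROR records above (req-31b5fc30-... and req-a3458aaf-...) are placement's optimistic-concurrency check firing: each inventory PUT carried a stale resource_provider_generation, so the report client re-reads the provider's inventories, aggregates and traits and retries, which is the "Refreshing ..." activity logged around them. Below is a minimal sketch of that conflict-and-retry loop against the Placement HTTP API; it uses plain requests with a hypothetical endpoint and token (PLACEMENT, ADMIN_TOKEN) purely for illustration and is not Nova's scheduler report client.

    # Hedged sketch: retry an inventory update when placement answers
    # HTTP 409 with code "placement.concurrent_update" (generation conflict).
    # Endpoint, token and attempt count are illustrative assumptions.
    import requests

    PLACEMENT = "http://placement.example/resource_providers"   # hypothetical
    HEADERS = {"X-Auth-Token": "ADMIN_TOKEN",                    # hypothetical
               "OpenStack-API-Version": "placement 1.26"}

    def put_inventories(rp_uuid, inventories, attempts=3):
        """Update a provider's inventories, retrying on generation conflicts."""
        for _ in range(attempts):
            # Re-read the provider to learn its current generation.
            rp = requests.get(f"{PLACEMENT}/{rp_uuid}", headers=HEADERS)
            rp.raise_for_status()
            body = {"resource_provider_generation": rp.json()["generation"],
                    "inventories": inventories}
            resp = requests.put(f"{PLACEMENT}/{rp_uuid}/inventories",
                                headers=HEADERS, json=body)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 concurrent_update: another writer bumped the generation
            # first (as in the log above); loop and retry with a fresh read.
        raise RuntimeError("inventory update kept hitting concurrent_update")

    # e.g. put_inventories("2173cd1f-90eb-4aab-b51d-83c140d1a7be",
    #                      {"VCPU": {"total": 48, "max_unit": 16,
    #                                "allocation_ratio": 4.0}})
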
[ 630.507171] env[69994]: DEBUG nova.compute.manager [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Received event network-changed-cc0658e0-6fc9-45af-9d60-534898bf6858 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 630.507356] env[69994]: DEBUG nova.compute.manager [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Refreshing instance network info cache due to event network-changed-cc0658e0-6fc9-45af-9d60-534898bf6858. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 630.507566] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Acquiring lock "refresh_cache-317e3366-4aec-4c80-bcf9-df84bc5e9939" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.507701] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Acquired lock "refresh_cache-317e3366-4aec-4c80-bcf9-df84bc5e9939" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.507882] env[69994]: DEBUG nova.network.neutron [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Refreshing network info cache for port cc0658e0-6fc9-45af-9d60-534898bf6858 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 630.670131] env[69994]: DEBUG nova.compute.manager [req-560dcd47-500a-4399-ad46-a51dbce725a7 req-aa1e51db-bba2-44f3-88c6-2204befd6399 service nova] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Received event network-vif-plugged-aa2b9475-c88c-41e6-be6f-249869384580 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 630.670408] env[69994]: DEBUG oslo_concurrency.lockutils [req-560dcd47-500a-4399-ad46-a51dbce725a7 req-aa1e51db-bba2-44f3-88c6-2204befd6399 service nova] Acquiring lock "1232f601-3339-4fc2-92b2-aa550af90b01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.670632] env[69994]: DEBUG oslo_concurrency.lockutils [req-560dcd47-500a-4399-ad46-a51dbce725a7 req-aa1e51db-bba2-44f3-88c6-2204befd6399 service nova] Lock "1232f601-3339-4fc2-92b2-aa550af90b01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.670880] env[69994]: DEBUG oslo_concurrency.lockutils [req-560dcd47-500a-4399-ad46-a51dbce725a7 req-aa1e51db-bba2-44f3-88c6-2204befd6399 service nova] Lock "1232f601-3339-4fc2-92b2-aa550af90b01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.671040] env[69994]: DEBUG nova.compute.manager [req-560dcd47-500a-4399-ad46-a51dbce725a7 req-aa1e51db-bba2-44f3-88c6-2204befd6399 service nova] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] No waiting events 
found dispatching network-vif-plugged-aa2b9475-c88c-41e6-be6f-249869384580 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 630.671336] env[69994]: WARNING nova.compute.manager [req-560dcd47-500a-4399-ad46-a51dbce725a7 req-aa1e51db-bba2-44f3-88c6-2204befd6399 service nova] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Received unexpected event network-vif-plugged-aa2b9475-c88c-41e6-be6f-249869384580 for instance with vm_state building and task_state spawning. [ 630.692268] env[69994]: DEBUG oslo_concurrency.lockutils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.692268] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 630.692268] env[69994]: DEBUG oslo_concurrency.lockutils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.692268] env[69994]: DEBUG oslo_concurrency.lockutils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.692486] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 630.692486] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8ee89e2-3839-41a7-b94e-d3d0685917c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.715617] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 630.715617] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 630.716306] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2a61ea-dad2-4018-9a5d-973a4598bb5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.731434] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff650fe1-b1f0-4c1d-b247-1992d7286b9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.742532] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 630.742532] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528eebfa-386e-83a0-db25-ce304b487e87" [ 630.742532] env[69994]: _type = "Task" [ 630.742532] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.755455] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528eebfa-386e-83a0-db25-ce304b487e87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.769949] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30449ee2-2d52-428d-8e09-064027d27fc6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.779521] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8468eddd-21f4-4dac-913f-cff0595d45b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.823320] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6a33fd-6dd6-4aaf-a6b2-7517fc2b071f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.836529] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365fa18d-5acc-47bc-95eb-17476aa33e1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.856666] env[69994]: DEBUG nova.compute.provider_tree [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 630.882769] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f 
tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2924930, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.61805} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.882769] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1d5b8fb7-eeb0-49da-acdf-53b7741e863e/1d5b8fb7-eeb0-49da-acdf-53b7741e863e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 630.882769] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 630.882769] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa9fe37e-67e6-4579-8865-da5bb443d1e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.888154] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 630.888154] env[69994]: value = "task-2924933" [ 630.888154] env[69994]: _type = "Task" [ 630.888154] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.899922] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2924933, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.900841] env[69994]: DEBUG nova.network.neutron [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Updating instance_info_cache with network_info: [{"id": "aa2b9475-c88c-41e6-be6f-249869384580", "address": "fa:16:3e:50:bc:b5", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa2b9475-c8", "ovs_interfaceid": "aa2b9475-c88c-41e6-be6f-249869384580", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.920187] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924931, 'name': ReconfigVM_Task, 'duration_secs': 0.330495} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.920399] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Reconfigured VM instance instance-00000001 to attach disk [datastore1] dc548f2f-e6d6-4273-8c24-b4f52842e0d2/dc548f2f-e6d6-4273-8c24-b4f52842e0d2.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 630.921119] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8caaeb4f-f92c-4a70-9917-cff9c312df66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.928277] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 630.928277] env[69994]: value = "task-2924934" [ 630.928277] env[69994]: _type = "Task" [ 630.928277] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.937042] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924934, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.959097] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924932, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455857} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.959217] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 317e3366-4aec-4c80-bcf9-df84bc5e9939/317e3366-4aec-4c80-bcf9-df84bc5e9939.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 630.959805] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 630.962801] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ab1ab28-1edd-4a7b-b0a1-2431e212ce1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.972347] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Waiting for the task: (returnval){ [ 630.972347] env[69994]: value = "task-2924935" [ 630.972347] env[69994]: _type = "Task" [ 630.972347] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.980549] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d250cf-08f5-1352-0a48-e6e83668d779, 'name': SearchDatastore_Task, 'duration_secs': 0.009256} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.981689] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71d0eebb-8b16-4d4f-8172-fd00a4a6cc2b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.987429] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924935, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.990328] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 630.990328] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5213ccae-a14c-4cb1-c452-c7a00172627d" [ 630.990328] env[69994]: _type = "Task" [ 630.990328] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.999230] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5213ccae-a14c-4cb1-c452-c7a00172627d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.109657] env[69994]: DEBUG nova.network.neutron [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updated VIF entry in instance network info cache for port 6634b7a0-01a3-49e4-a7ac-6f8572d86925. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 631.110042] env[69994]: DEBUG nova.network.neutron [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating instance_info_cache with network_info: [{"id": "6634b7a0-01a3-49e4-a7ac-6f8572d86925", "address": "fa:16:3e:dc:4a:90", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6634b7a0-01", "ovs_interfaceid": "6634b7a0-01a3-49e4-a7ac-6f8572d86925", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.255878] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 631.256634] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Creating directory with path [datastore2] 
vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 631.256634] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d200292-2859-48e0-b6b2-36b86a9f6f86 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.273737] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Created directory with path [datastore2] vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 631.273947] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Fetch image to [datastore2] vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 631.274197] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Downloading image file data f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 to [datastore2] vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk on the data store datastore2 {{(pid=69994) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 631.275188] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd1f63a-3ef3-41e8-a076-f4ee7bd27f37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.283996] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed8324e-6b5e-4887-8d57-2c8f980759f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.296773] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b15c47c-1cd9-4101-bbb0-11851da9ea02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.339233] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37f9e20-3ece-42f4-b9a6-80df0e2ffa81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.345914] env[69994]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ee144c56-e5dc-4df4-9d20-529e204a0e5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.377663] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Downloading image file data f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 to the data store datastore2 {{(pid=69994) 
fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 631.402521] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2924933, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077402} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.402521] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 631.404197] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfcfb6a-96ec-4946-9ccb-6f986833d136 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.411711] env[69994]: DEBUG nova.scheduler.client.report [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 18 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 631.411965] env[69994]: DEBUG nova.compute.provider_tree [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 18 to 19 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 631.412164] env[69994]: DEBUG nova.compute.provider_tree [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 631.419018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Releasing lock "refresh_cache-1232f601-3339-4fc2-92b2-aa550af90b01" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.419018] env[69994]: DEBUG nova.compute.manager [None 
req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Instance network_info: |[{"id": "aa2b9475-c88c-41e6-be6f-249869384580", "address": "fa:16:3e:50:bc:b5", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa2b9475-c8", "ovs_interfaceid": "aa2b9475-c88c-41e6-be6f-249869384580", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 631.419240] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:bc:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa2b9475-c88c-41e6-be6f-249869384580', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 631.424562] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Creating folder: Project (227f00137ddc436abf0892d7d9ae73ef). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.428716] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d12d8f80-837e-402b-bcc2-855b954f129d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.450483] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 1d5b8fb7-eeb0-49da-acdf-53b7741e863e/1d5b8fb7-eeb0-49da-acdf-53b7741e863e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 631.460976] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06feb7a8-da8a-44f9-9e37-67659d1527de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.480310] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Created folder: Project (227f00137ddc436abf0892d7d9ae73ef) in parent group-v587342. [ 631.480310] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Creating folder: Instances. Parent ref: group-v587358. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.485020] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7177f5da-8874-4f66-9aa6-772af13c0455 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.489373] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924934, 'name': Rename_Task, 'duration_secs': 0.152211} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.492354] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 631.492478] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 631.492478] env[69994]: value = "task-2924937" [ 631.492478] env[69994]: _type = "Task" [ 631.492478] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.492651] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f996b1f4-ff75-422c-8a7f-d891023af9de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.506059] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924935, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06265} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.510085] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 631.510570] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435af5df-05d4-47ca-8773-3a1c26491c2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.522599] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5213ccae-a14c-4cb1-c452-c7a00172627d, 'name': SearchDatastore_Task, 'duration_secs': 0.008398} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.522599] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Created folder: Instances in parent group-v587358. [ 631.522678] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 631.522898] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2924937, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.523581] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 631.523581] env[69994]: value = "task-2924939" [ 631.523581] env[69994]: _type = "Task" [ 631.523581] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.525776] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.525776] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 63d6a59a-d58c-4179-ad39-eb9863e6f84c/63d6a59a-d58c-4179-ad39-eb9863e6f84c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 631.525776] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 631.525776] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c09a162-4d33-48ca-875d-bb8914b70ba2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.527879] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-81e22b88-a103-47ad-8821-7c179788e9ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.548669] env[69994]: DEBUG oslo_vmware.rw_handles [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 631.614982] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 631.614982] env[69994]: value = "task-2924941" [ 631.614982] env[69994]: _type = "Task" [ 631.614982] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.615375] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 631.615375] env[69994]: value = "task-2924940" [ 631.615375] env[69994]: _type = "Task" [ 631.615375] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.615579] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924939, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.629024] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8725e3b-f024-4312-922e-87ea88d9d64f req-fda70068-fddf-4aaf-8937-1b8d8f4ca3d9 service nova] Releasing lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.637182] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 317e3366-4aec-4c80-bcf9-df84bc5e9939/317e3366-4aec-4c80-bcf9-df84bc5e9939.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 631.645485] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0f8a422-f159-473a-8108-ee0a7a2e56dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.665831] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924941, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.670053] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Waiting for the task: (returnval){ [ 631.670053] env[69994]: value = "task-2924942" [ 631.670053] env[69994]: _type = "Task" [ 631.670053] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.670446] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2924940, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.682371] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924942, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.917520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.442s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.918400] env[69994]: DEBUG nova.compute.manager [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 631.921776] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 12.873s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.921967] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.922138] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 631.922439] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.501s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.926139] env[69994]: INFO nova.compute.claims [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 631.933985] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f29742e-78da-4f41-acf9-3a29d4952f85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.953653] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa91066-c565-485a-9a73-26d8ecb1e0ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.958515] env[69994]: DEBUG nova.network.neutron [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Updated VIF entry in instance network info cache for port cc0658e0-6fc9-45af-9d60-534898bf6858. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 631.958818] env[69994]: DEBUG nova.network.neutron [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Updating instance_info_cache with network_info: [{"id": "cc0658e0-6fc9-45af-9d60-534898bf6858", "address": "fa:16:3e:be:63:1e", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.122", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc0658e0-6f", "ovs_interfaceid": "cc0658e0-6fc9-45af-9d60-534898bf6858", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.984036] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ce392b-47de-45ba-a40d-ad7761507b7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.995828] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2525d71-00e7-4bc9-a40c-0e485774251b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.045271] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2924937, 'name': ReconfigVM_Task, 'duration_secs': 0.29623} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.045565] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181116MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 632.046626] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.049743] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 1d5b8fb7-eeb0-49da-acdf-53b7741e863e/1d5b8fb7-eeb0-49da-acdf-53b7741e863e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 632.054412] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a2cff06-ac59-46d0-948a-169c4dc31b00 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.063197] env[69994]: DEBUG oslo_vmware.api [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924939, 'name': PowerOnVM_Task, 'duration_secs': 0.548655} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.065315] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 632.065845] env[69994]: INFO nova.compute.manager [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Took 14.24 seconds to spawn the instance on the hypervisor. [ 632.066129] env[69994]: DEBUG nova.compute.manager [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 632.066503] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 632.066503] env[69994]: value = "task-2924943" [ 632.066503] env[69994]: _type = "Task" [ 632.066503] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.067308] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49744374-43a4-4dce-9c10-3a7d63bbf446 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.084689] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2924943, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.143752] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2924940, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.151203] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924941, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.187878] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924942, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.357640] env[69994]: DEBUG oslo_vmware.rw_handles [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Completed reading data from the image iterator. {{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 632.357640] env[69994]: DEBUG oslo_vmware.rw_handles [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 632.432812] env[69994]: DEBUG nova.compute.utils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 632.435171] env[69994]: DEBUG nova.compute.manager [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Not allocating networking since 'none' was specified. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 632.461721] env[69994]: DEBUG oslo_concurrency.lockutils [req-7f622538-d959-4907-91ac-dc21e3589ced req-495d1fb5-7d8b-49f8-a101-ac449d6b19c0 service nova] Releasing lock "refresh_cache-317e3366-4aec-4c80-bcf9-df84bc5e9939" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.500116] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Downloaded image file data f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 to vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk on the data store datastore2 {{(pid=69994) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 632.502200] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 632.503025] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Copying Virtual Disk [datastore2] vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk to [datastore2] vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 632.503025] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e7dd9d9-c790-49f5-95c9-489a08ea0ba5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.510094] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 632.510094] env[69994]: value = "task-2924944" [ 632.510094] env[69994]: _type = "Task" [ 632.510094] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.522708] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924944, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.580177] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2924943, 'name': Rename_Task, 'duration_secs': 0.275847} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.580177] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 632.580177] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08ae35d0-7259-4b21-bc0e-bbd6aae1d0f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.586364] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 632.586364] env[69994]: value = "task-2924945" [ 632.586364] env[69994]: _type = "Task" [ 632.586364] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.603917] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2924945, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.610699] env[69994]: INFO nova.compute.manager [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Took 19.06 seconds to build instance. [ 632.650986] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924941, 'name': CreateVM_Task, 'duration_secs': 0.748274} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.655364] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 632.655951] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2924940, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.711477} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.656632] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.656772] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.657087] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 632.657395] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 63d6a59a-d58c-4179-ad39-eb9863e6f84c/63d6a59a-d58c-4179-ad39-eb9863e6f84c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 632.659055] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 632.659055] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3c30db0-bc66-4b5c-ab5d-078ced553c42 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.659594] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dedc9d79-0430-4350-a8e2-a2c5a19478b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.667042] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Waiting for the task: (returnval){ [ 632.667042] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5215fc53-e289-7375-ee60-992bad2088ae" [ 632.667042] env[69994]: _type = "Task" [ 632.667042] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.672034] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 632.672034] env[69994]: value = "task-2924946" [ 632.672034] env[69994]: _type = "Task" [ 632.672034] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.680196] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5215fc53-e289-7375-ee60-992bad2088ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.696362] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924942, 'name': ReconfigVM_Task, 'duration_secs': 0.746585} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.696561] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2924946, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.696910] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 317e3366-4aec-4c80-bcf9-df84bc5e9939/317e3366-4aec-4c80-bcf9-df84bc5e9939.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 632.697806] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f677a3a6-e2ed-4e0b-9a6f-02ef39bce6d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.705622] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Waiting for the task: (returnval){ [ 632.705622] env[69994]: value = "task-2924947" [ 632.705622] env[69994]: _type = "Task" [ 632.705622] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.715428] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924947, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.939937] env[69994]: DEBUG nova.compute.manager [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 633.028281] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924944, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.098413] env[69994]: DEBUG oslo_vmware.api [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2924945, 'name': PowerOnVM_Task, 'duration_secs': 0.486329} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.098706] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 633.098953] env[69994]: INFO nova.compute.manager [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Took 12.99 seconds to spawn the instance on the hypervisor. [ 633.099180] env[69994]: DEBUG nova.compute.manager [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 633.101532] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d248bb71-68fc-4126-a525-1b687b19f32a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.118712] env[69994]: DEBUG oslo_concurrency.lockutils [None req-413f4065-0daf-4a0a-8ba0-e496497c41bd tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Lock "dc548f2f-e6d6-4273-8c24-b4f52842e0d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.579s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.182812] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5215fc53-e289-7375-ee60-992bad2088ae, 'name': SearchDatastore_Task, 'duration_secs': 0.015568} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.183519] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.183854] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 633.184114] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.184314] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 633.184554] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 633.185908] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c0af218-b3f4-41e6-9025-e7cb8e6fa337 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.195018] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2924946, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076231} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.195018] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 633.196663] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d143f893-13ca-4fd5-b5b0-e25b2ef93f5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.205603] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 633.206244] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 633.207315] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf8768d5-167a-4f87-8216-190cb670b206 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.233184] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 63d6a59a-d58c-4179-ad39-eb9863e6f84c/63d6a59a-d58c-4179-ad39-eb9863e6f84c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 633.237142] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b49e08d-15c4-488d-8efc-d878c8d7cbd2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.256657] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Waiting for the task: (returnval){ [ 633.256657] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e5267f-9ca0-281a-8362-73f597390757" [ 633.256657] env[69994]: _type = "Task" [ 633.256657] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.257372] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924947, 'name': Rename_Task, 'duration_secs': 0.1908} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.258011] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 633.261353] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2dbca0b-4b7d-46c3-8756-0e2e8e0ce5f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.270963] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 633.270963] env[69994]: value = "task-2924948" [ 633.270963] env[69994]: _type = "Task" [ 633.270963] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.278143] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e5267f-9ca0-281a-8362-73f597390757, 'name': SearchDatastore_Task, 'duration_secs': 0.012319} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.279600] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Waiting for the task: (returnval){ [ 633.279600] env[69994]: value = "task-2924949" [ 633.279600] env[69994]: _type = "Task" [ 633.279600] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.279817] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e6efa40-1506-4572-9ffb-ee5aecc2f249 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.290516] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2924948, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.290855] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b7c8b8-685a-4d5e-a5b4-d2079347c20c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.297042] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Waiting for the task: (returnval){ [ 633.297042] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5210249d-ecb9-569d-de85-563a7f3ebc20" [ 633.297042] env[69994]: _type = "Task" [ 633.297042] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.303717] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924949, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.305529] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e294f270-8b0b-4084-854e-50d77ab0df4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.313642] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5210249d-ecb9-569d-de85-563a7f3ebc20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.341876] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb264534-6bd2-4642-9602-26b6b516439a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.349932] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ec514c-f419-41bb-811a-1290d54e340d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.363868] env[69994]: DEBUG nova.compute.provider_tree [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 633.524820] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924944, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722497} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.525810] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Copied Virtual Disk [datastore2] vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk to [datastore2] vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 633.526037] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Deleting the datastore file [datastore2] vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/tmp-sparse.vmdk {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 633.526650] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58820bd5-22e6-4bf6-ab2c-1fd6553c83df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.534500] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 633.534500] env[69994]: value = "task-2924950" [ 633.534500] env[69994]: _type = "Task" [ 633.534500] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.546197] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924950, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.561553] env[69994]: DEBUG nova.compute.manager [req-57626678-3439-46ce-b631-3e80b108c1b5 req-41e63fc7-a443-417c-a286-b7bb7c3cd292 service nova] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Received event network-changed-aa2b9475-c88c-41e6-be6f-249869384580 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 633.561553] env[69994]: DEBUG nova.compute.manager [req-57626678-3439-46ce-b631-3e80b108c1b5 req-41e63fc7-a443-417c-a286-b7bb7c3cd292 service nova] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Refreshing instance network info cache due to event network-changed-aa2b9475-c88c-41e6-be6f-249869384580. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 633.561703] env[69994]: DEBUG oslo_concurrency.lockutils [req-57626678-3439-46ce-b631-3e80b108c1b5 req-41e63fc7-a443-417c-a286-b7bb7c3cd292 service nova] Acquiring lock "refresh_cache-1232f601-3339-4fc2-92b2-aa550af90b01" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.561798] env[69994]: DEBUG oslo_concurrency.lockutils [req-57626678-3439-46ce-b631-3e80b108c1b5 req-41e63fc7-a443-417c-a286-b7bb7c3cd292 service nova] Acquired lock "refresh_cache-1232f601-3339-4fc2-92b2-aa550af90b01" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 633.561943] env[69994]: DEBUG nova.network.neutron [req-57626678-3439-46ce-b631-3e80b108c1b5 req-41e63fc7-a443-417c-a286-b7bb7c3cd292 service nova] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Refreshing network info cache for port aa2b9475-c88c-41e6-be6f-249869384580 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 633.632405] env[69994]: DEBUG nova.compute.manager [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 633.642922] env[69994]: INFO nova.compute.manager [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Took 17.98 seconds to build instance. [ 633.783117] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2924948, 'name': ReconfigVM_Task, 'duration_secs': 0.383286} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.787781] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 63d6a59a-d58c-4179-ad39-eb9863e6f84c/63d6a59a-d58c-4179-ad39-eb9863e6f84c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 633.788475] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-072f5ebc-d728-4c38-873d-90c5c402d061 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.795858] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924949, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.797565] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 633.797565] env[69994]: value = "task-2924951" [ 633.797565] env[69994]: _type = "Task" [ 633.797565] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.820830] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2924951, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.820830] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5210249d-ecb9-569d-de85-563a7f3ebc20, 'name': SearchDatastore_Task, 'duration_secs': 0.020256} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.820830] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.820830] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1232f601-3339-4fc2-92b2-aa550af90b01/1232f601-3339-4fc2-92b2-aa550af90b01.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 633.821219] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d67c328-cbcd-49cc-aeff-a8eb0af1ec40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.829116] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Waiting for the task: (returnval){ [ 633.829116] env[69994]: value = "task-2924952" [ 633.829116] env[69994]: _type = "Task" [ 633.829116] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.839261] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924952, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.869240] env[69994]: DEBUG nova.scheduler.client.report [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 633.953432] env[69994]: DEBUG nova.compute.manager [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 633.989783] env[69994]: DEBUG nova.virt.hardware [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 633.989783] env[69994]: DEBUG nova.virt.hardware [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 633.989979] env[69994]: DEBUG nova.virt.hardware [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 633.990217] env[69994]: DEBUG nova.virt.hardware [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 633.990380] env[69994]: DEBUG nova.virt.hardware [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 633.990702] env[69994]: DEBUG 
nova.virt.hardware [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 633.990792] env[69994]: DEBUG nova.virt.hardware [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 633.993261] env[69994]: DEBUG nova.virt.hardware [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 633.993261] env[69994]: DEBUG nova.virt.hardware [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 633.993261] env[69994]: DEBUG nova.virt.hardware [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 633.993261] env[69994]: DEBUG nova.virt.hardware [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 633.993261] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3298e71a-5a4b-413a-9dab-7d7564306f3e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.003804] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74feb84d-c075-488a-bcfb-95e74790a3fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.021647] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 634.028938] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Creating folder: Project (0da0c9f5a9ee41ffaf26a5fcfd539ae6). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 634.029851] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c33e870e-4bfe-4d22-b68d-056ac63c20b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.046832] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924950, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0255} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.048678] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 634.049305] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Moving file from [datastore2] vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 to [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48. {{(pid=69994) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 634.049305] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Created folder: Project (0da0c9f5a9ee41ffaf26a5fcfd539ae6) in parent group-v587342. [ 634.049305] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Creating folder: Instances. Parent ref: group-v587361. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 634.049559] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-5770e27f-14ad-4aa4-874d-87fad6c6809d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.055022] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef3cbf63-4f1e-4bac-9be5-2ff1d9d18276 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.058079] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 634.058079] env[69994]: value = "task-2924954" [ 634.058079] env[69994]: _type = "Task" [ 634.058079] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.067076] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Created folder: Instances in parent group-v587361. 
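The records above and below all follow one shape: acquire the devstack-image-cache lock for the cached vmdk, invoke a vCenter *_Task method (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, CreateFolder/CreateVM_Task, PowerOnVM_Task), and let oslo.vmware's wait_for_task poll it until the "completed successfully" record. The snippet below is a minimal sketch of that pattern only, assuming an already-created oslo_vmware.api.VMwareAPISession; the helper name copy_then_extend_root_disk, the datacenter/datastore arguments, and the size are illustrative and not Nova's own implementation.

```python
# Minimal sketch (not Nova's actual code) of the pattern this trace repeats:
# guard the shared image-cache path with an oslo.concurrency lock, start a
# vCenter *_Task via oslo.vmware, then poll it with wait_for_task.
from oslo_concurrency import lockutils


def copy_then_extend_root_disk(session, datacenter, src_vmdk, dst_vmdk, size_kb):
    """Copy a cached image vmdk to the instance path, then extend it.

    `session` is assumed to be an already-created
    oslo_vmware.api.VMwareAPISession; paths and sizes are illustrative.
    """
    disk_mgr = session.vim.service_content.virtualDiskManager

    # Mirrors the 'Acquiring lock "[datastore1] devstack-image-cache_base/..."'
    # lines: only one request works on a given cache entry at a time.
    with lockutils.lock(src_vmdk):
        copy_task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=src_vmdk, sourceDatacenter=datacenter,
            destName=dst_vmdk, destDatacenter=datacenter)
        # Produces the Task: {'name': 'CopyVirtualDisk_Task'} polling records.
        session.wait_for_task(copy_task)

    # Corresponds to 'Extending root virtual disk to 1048576' followed by the
    # ExtendVirtualDisk_Task progress/completion records.
    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=dst_vmdk, datacenter=datacenter,
        newCapacityKb=size_kb, eagerZero=False)
    session.wait_for_task(extend_task)
```

The "Waiting for the task: (returnval){...}" and "progress is N%" lines in this trace are emitted by that wait_for_task/_poll_task loop, which keeps querying the task moref until vCenter reports success or error.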
[ 634.067292] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 634.069570] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 634.070112] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-226bf7bd-a331-4d37-9d2e-3b0c14149466 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.086264] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924954, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.090550] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 634.090550] env[69994]: value = "task-2924956" [ 634.090550] env[69994]: _type = "Task" [ 634.090550] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.099118] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924956, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.148481] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c820b44f-bb6c-47ed-87e4-6b7dbc74040f tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "1d5b8fb7-eeb0-49da-acdf-53b7741e863e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.498s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 634.190707] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.297719] env[69994]: DEBUG oslo_vmware.api [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924949, 'name': PowerOnVM_Task, 'duration_secs': 0.540592} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.298211] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 634.300119] env[69994]: INFO nova.compute.manager [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Took 11.94 seconds to spawn the instance on the hypervisor. [ 634.300119] env[69994]: DEBUG nova.compute.manager [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 634.300119] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a7c6df-b93f-428b-bad8-0cc292b3fbeb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.324331] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2924951, 'name': Rename_Task, 'duration_secs': 0.310745} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.326134] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 634.326270] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc097c13-09f7-4a41-8100-1e640381a03c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.335802] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 634.335802] env[69994]: value = "task-2924957" [ 634.335802] env[69994]: _type = "Task" [ 634.335802] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.341822] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924952, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.347293] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2924957, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.380034] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.455s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 634.380034] env[69994]: DEBUG nova.compute.manager [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 634.382422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.341s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 634.384715] env[69994]: INFO nova.compute.claims [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 634.568461] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924954, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.038588} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.568753] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] File moved {{(pid=69994) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 634.569043] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Cleaning up location [datastore2] vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 634.569139] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Deleting the datastore file [datastore2] vmware_temp/25b4abf4-0880-44fb-93a0-55818289e209 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 634.569444] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8469fc93-d9ab-49a7-92e3-06bc53934e93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.576242] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 634.576242] env[69994]: value = "task-2924958" [ 634.576242] env[69994]: _type = "Task" [ 634.576242] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.585684] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924958, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.611736] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924956, 'name': CreateVM_Task, 'duration_secs': 0.388293} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.611736] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 634.612485] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.612574] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.612932] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 634.613236] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a83b47d6-d632-4e27-bd54-c43514b7b952 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.619021] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Waiting for the task: (returnval){ [ 634.619021] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5230b8b9-3961-e4ef-e5bb-e9cb36f4d4b0" [ 634.619021] env[69994]: _type = "Task" [ 634.619021] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.630949] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5230b8b9-3961-e4ef-e5bb-e9cb36f4d4b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.659781] env[69994]: DEBUG nova.compute.manager [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 634.795413] env[69994]: DEBUG nova.network.neutron [req-57626678-3439-46ce-b631-3e80b108c1b5 req-41e63fc7-a443-417c-a286-b7bb7c3cd292 service nova] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Updated VIF entry in instance network info cache for port aa2b9475-c88c-41e6-be6f-249869384580. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 634.795856] env[69994]: DEBUG nova.network.neutron [req-57626678-3439-46ce-b631-3e80b108c1b5 req-41e63fc7-a443-417c-a286-b7bb7c3cd292 service nova] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Updating instance_info_cache with network_info: [{"id": "aa2b9475-c88c-41e6-be6f-249869384580", "address": "fa:16:3e:50:bc:b5", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa2b9475-c8", "ovs_interfaceid": "aa2b9475-c88c-41e6-be6f-249869384580", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.840155] env[69994]: INFO nova.compute.manager [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Took 18.70 seconds to build instance. [ 634.852346] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924952, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.727482} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.855794] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1232f601-3339-4fc2-92b2-aa550af90b01/1232f601-3339-4fc2-92b2-aa550af90b01.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 634.856082] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 634.856372] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2924957, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.856584] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66179833-1b1c-4179-8b1f-ff8f9638126e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.864414] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Waiting for the task: (returnval){ [ 634.864414] env[69994]: value = "task-2924959" [ 634.864414] env[69994]: _type = "Task" [ 634.864414] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.879673] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924959, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.892433] env[69994]: DEBUG nova.compute.utils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 634.896903] env[69994]: DEBUG nova.compute.manager [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 634.897165] env[69994]: DEBUG nova.network.neutron [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 635.028642] env[69994]: DEBUG nova.policy [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5667c67d9b6f44138d1479e901b60c74', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34469ad51e694a3389595c28ef508144', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 635.093162] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924958, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031131} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.093162] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 635.093162] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ab45c34-f935-47e7-a880-4b6eb49dc1bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.105103] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 635.105103] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52af557c-99aa-381d-337b-97fcb50c35ba" [ 635.105103] env[69994]: _type = "Task" [ 635.105103] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.114134] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52af557c-99aa-381d-337b-97fcb50c35ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.130256] env[69994]: DEBUG nova.compute.manager [None req-d20dc835-a3d9-4f4a-bd9d-559334a273d8 tempest-ServerDiagnosticsTest-2038179331 tempest-ServerDiagnosticsTest-2038179331-project-admin] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 635.134905] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be77c314-afaa-4a10-88ed-1f505e7e79c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.142261] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5230b8b9-3961-e4ef-e5bb-e9cb36f4d4b0, 'name': SearchDatastore_Task, 'duration_secs': 0.011372} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.144103] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.144103] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 635.144103] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.148826] env[69994]: INFO nova.compute.manager [None req-d20dc835-a3d9-4f4a-bd9d-559334a273d8 tempest-ServerDiagnosticsTest-2038179331 tempest-ServerDiagnosticsTest-2038179331-project-admin] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Retrieving diagnostics [ 635.149986] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f5904c-71cf-46a1-9c30-399cf6e8dfac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.216200] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.301189] env[69994]: DEBUG oslo_concurrency.lockutils [req-57626678-3439-46ce-b631-3e80b108c1b5 req-41e63fc7-a443-417c-a286-b7bb7c3cd292 service nova] Releasing lock "refresh_cache-1232f601-3339-4fc2-92b2-aa550af90b01" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.347157] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ce5fd9d-006c-44e5-89d1-f92184fd402d tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Lock "317e3366-4aec-4c80-bcf9-df84bc5e9939" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.215s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.356040] env[69994]: DEBUG oslo_vmware.api [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2924957, 'name': PowerOnVM_Task, 'duration_secs': 0.691047} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.356423] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 635.356637] env[69994]: INFO nova.compute.manager [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Took 10.73 seconds to spawn the instance on the hypervisor. [ 635.356805] env[69994]: DEBUG nova.compute.manager [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 635.357676] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64db0055-9418-4da0-838f-0ea0792952b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.378662] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924959, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072759} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.378662] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 635.379392] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2534c7-30cc-4955-8161-27310ee23d7c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.405854] env[69994]: DEBUG nova.compute.manager [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 635.419331] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 1232f601-3339-4fc2-92b2-aa550af90b01/1232f601-3339-4fc2-92b2-aa550af90b01.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 635.425605] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe84ff76-0bae-4b42-b74e-ba7d5ea8688f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.458335] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Waiting for the task: (returnval){ [ 635.458335] env[69994]: value = "task-2924960" [ 635.458335] env[69994]: _type = "Task" [ 635.458335] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.469926] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924960, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.625260] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52af557c-99aa-381d-337b-97fcb50c35ba, 'name': SearchDatastore_Task, 'duration_secs': 0.014878} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.629583] env[69994]: DEBUG oslo_concurrency.lockutils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.629976] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3/316ab41e-d3c1-4cef-8d63-a138e21d0ea3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 635.632830] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 635.636427] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 635.636427] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27de6283-df59-41e7-8635-d5263fd587a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.639504] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "803e9885-000f-4696-9fb9-03361ef46538" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.639778] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "803e9885-000f-4696-9fb9-03361ef46538" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.642883] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d9bbd6c-d94a-40ce-8d5e-8449c8064f15 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.652505] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] 
Waiting for the task: (returnval){ [ 635.652505] env[69994]: value = "task-2924961" [ 635.652505] env[69994]: _type = "Task" [ 635.652505] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.653782] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 635.653945] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 635.657995] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38e04ce8-4372-4ea0-b633-a31e8efc29c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.668024] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924961, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.670010] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Waiting for the task: (returnval){ [ 635.670010] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52832ef4-7ec0-1f78-eb12-c231a6491dd0" [ 635.670010] env[69994]: _type = "Task" [ 635.670010] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.682264] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52832ef4-7ec0-1f78-eb12-c231a6491dd0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.778502] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e75ead-c70d-485f-84ce-e96345ea2b70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.787774] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37fa3b46-1b88-4e86-966c-7184055a1706 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.831182] env[69994]: DEBUG nova.network.neutron [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Successfully created port: a81f7c8f-8499-4f74-8860-fca65590ea7b {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 635.833115] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272bbe7b-e2c7-4d61-8d31-095a9ce0a250 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.841805] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b826edd8-0cca-4ab6-89b4-8aaa7ec3bb16 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.850490] env[69994]: DEBUG nova.compute.manager [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 635.863664] env[69994]: DEBUG nova.compute.provider_tree [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 635.883218] env[69994]: INFO nova.compute.manager [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Took 19.11 seconds to build instance. 
[ 635.947251] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.947251] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.973326] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924960, 'name': ReconfigVM_Task, 'duration_secs': 0.514189} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.973326] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 1232f601-3339-4fc2-92b2-aa550af90b01/1232f601-3339-4fc2-92b2-aa550af90b01.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 635.974495] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e144e649-8163-4c3c-aeab-069d0f36705b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.985636] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Waiting for the task: (returnval){ [ 635.985636] env[69994]: value = "task-2924962" [ 635.985636] env[69994]: _type = "Task" [ 635.985636] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.998954] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924962, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.164682] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924961, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.186444] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52832ef4-7ec0-1f78-eb12-c231a6491dd0, 'name': SearchDatastore_Task, 'duration_secs': 0.021705} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.187376] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3164f403-e1cb-4495-b6d6-eff7cfc8cb44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.194939] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Waiting for the task: (returnval){ [ 636.194939] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c8293b-6c4e-047b-b01f-3c0c778d9aeb" [ 636.194939] env[69994]: _type = "Task" [ 636.194939] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.204072] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c8293b-6c4e-047b-b01f-3c0c778d9aeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.367024] env[69994]: DEBUG nova.scheduler.client.report [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 636.383018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.386870] env[69994]: DEBUG oslo_concurrency.lockutils [None req-683b75fd-b9a4-4d7a-b81a-d28734f1e558 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.627s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 636.428116] env[69994]: DEBUG nova.compute.manager [None 
req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 636.468869] env[69994]: DEBUG nova.virt.hardware [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 636.470125] env[69994]: DEBUG nova.virt.hardware [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 636.470125] env[69994]: DEBUG nova.virt.hardware [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 636.470125] env[69994]: DEBUG nova.virt.hardware [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 636.470299] env[69994]: DEBUG nova.virt.hardware [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 636.470580] env[69994]: DEBUG nova.virt.hardware [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 636.471920] env[69994]: DEBUG nova.virt.hardware [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 636.471920] env[69994]: DEBUG nova.virt.hardware [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 
tempest-ServersAdminTestJSON-854095043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 636.471920] env[69994]: DEBUG nova.virt.hardware [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 636.471920] env[69994]: DEBUG nova.virt.hardware [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 636.471920] env[69994]: DEBUG nova.virt.hardware [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 636.472737] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fa3c0d-ab12-414e-a897-dee27cfc8575 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.486956] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e49ed08-f9c6-43c2-8ace-f89250765dd7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.511735] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924962, 'name': Rename_Task, 'duration_secs': 0.264941} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.512027] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 636.512270] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11deb43a-89cc-414b-9bb5-1d95fd24a169 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.519784] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Waiting for the task: (returnval){ [ 636.519784] env[69994]: value = "task-2924963" [ 636.519784] env[69994]: _type = "Task" [ 636.519784] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.527659] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924963, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.665351] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924961, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.675216} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.665739] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3/316ab41e-d3c1-4cef-8d63-a138e21d0ea3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 636.665911] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 636.666184] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68efaaea-05d1-4e91-9bd4-45d1ed30f660 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.677816] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 636.677816] env[69994]: value = "task-2924964" [ 636.677816] env[69994]: _type = "Task" [ 636.677816] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.691377] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924964, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.707306] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c8293b-6c4e-047b-b01f-3c0c778d9aeb, 'name': SearchDatastore_Task, 'duration_secs': 0.057289} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.707480] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.707739] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 48f6ebca-d7fe-4086-80f4-0b89789dcddb/48f6ebca-d7fe-4086-80f4-0b89789dcddb.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 636.708008] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b283a8a1-f51a-4ded-b925-835432e0ac49 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.716967] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Waiting for the task: (returnval){ [ 636.716967] env[69994]: value = "task-2924965" [ 636.716967] env[69994]: _type = "Task" [ 636.716967] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.723778] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2924965, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.876212] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 636.876786] env[69994]: DEBUG nova.compute.manager [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 636.879641] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.836s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.881151] env[69994]: INFO nova.compute.claims [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 636.895034] env[69994]: DEBUG nova.compute.manager [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 637.030400] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924963, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.195175] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924964, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064063} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.195453] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 637.196278] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0857bfd-9bc3-4661-8ffb-3526d0acc8fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.222924] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3/316ab41e-d3c1-4cef-8d63-a138e21d0ea3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 637.223198] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dad015e7-bf77-4ff1-a4b3-bfc73870f067 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.247203] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Acquiring lock "9717f586-cedc-4f21-9ea6-7bf6e2991327" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.248442] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Lock "9717f586-cedc-4f21-9ea6-7bf6e2991327" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.257897] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2924965, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.258240] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 637.258240] env[69994]: value = "task-2924966" [ 637.258240] env[69994]: _type = "Task" [ 637.258240] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.387222] env[69994]: DEBUG nova.compute.utils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 637.391584] env[69994]: DEBUG nova.compute.manager [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 637.391831] env[69994]: DEBUG nova.network.neutron [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 637.436382] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.473099] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Acquiring lock "1d5b8fb7-eeb0-49da-acdf-53b7741e863e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.473487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Lock "1d5b8fb7-eeb0-49da-acdf-53b7741e863e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.473709] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Acquiring lock "1d5b8fb7-eeb0-49da-acdf-53b7741e863e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.473899] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Lock "1d5b8fb7-eeb0-49da-acdf-53b7741e863e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.474093] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98066a85-e655-49be-adbb-e66a13d11bf1 
tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Lock "1d5b8fb7-eeb0-49da-acdf-53b7741e863e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.476410] env[69994]: INFO nova.compute.manager [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Terminating instance [ 637.531668] env[69994]: DEBUG oslo_vmware.api [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924963, 'name': PowerOnVM_Task, 'duration_secs': 0.868911} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.531976] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 637.532185] env[69994]: INFO nova.compute.manager [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Took 10.93 seconds to spawn the instance on the hypervisor. [ 637.532363] env[69994]: DEBUG nova.compute.manager [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 637.533170] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71eb63c3-7518-4380-8a64-f83fe0f6cea4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.561440] env[69994]: DEBUG nova.policy [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5ff918e9e2f43c3bed5606522e29267', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '140aca79c0564bc598abc301d4ecce26', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 637.742891] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2924965, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.783182} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.743492] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 48f6ebca-d7fe-4086-80f4-0b89789dcddb/48f6ebca-d7fe-4086-80f4-0b89789dcddb.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 637.743492] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 637.744613] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef6d49bb-3cab-4ad8-bf49-c276e527cb04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.750880] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Waiting for the task: (returnval){ [ 637.750880] env[69994]: value = "task-2924967" [ 637.750880] env[69994]: _type = "Task" [ 637.750880] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.760048] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2924967, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.775108] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.896940] env[69994]: DEBUG nova.compute.manager [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 637.982277] env[69994]: DEBUG nova.compute.manager [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 637.982277] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 637.982277] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f85828-fb99-4a36-b940-6c27b579431c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.998515] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 638.001296] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b26290ea-86e1-43e3-a2eb-6a94d3313bc2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.008083] env[69994]: DEBUG oslo_vmware.api [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Waiting for the task: (returnval){ [ 638.008083] env[69994]: value = "task-2924968" [ 638.008083] env[69994]: _type = "Task" [ 638.008083] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.019726] env[69994]: DEBUG oslo_vmware.api [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Task: {'id': task-2924968, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.060235] env[69994]: INFO nova.compute.manager [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Took 21.23 seconds to build instance. [ 638.149296] env[69994]: DEBUG nova.network.neutron [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Successfully updated port: a81f7c8f-8499-4f74-8860-fca65590ea7b {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 638.262773] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2924967, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071342} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.273111] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 638.277123] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12412a6a-e36f-4d92-9d13-87fa2b98ecbe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.289555] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924966, 'name': ReconfigVM_Task, 'duration_secs': 0.736311} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.299057] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3/316ab41e-d3c1-4cef-8d63-a138e21d0ea3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 638.308951] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 48f6ebca-d7fe-4086-80f4-0b89789dcddb/48f6ebca-d7fe-4086-80f4-0b89789dcddb.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 638.311852] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7934aec7-605b-4f3e-9eab-7c8f5d4cf74b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.316053] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0896863-60ce-43ee-bf1c-de0470def7c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.334306] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 638.334306] env[69994]: value = "task-2924969" [ 638.334306] env[69994]: _type = "Task" [ 638.334306] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.335860] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Waiting for the task: (returnval){ [ 638.335860] env[69994]: value = "task-2924970" [ 638.335860] env[69994]: _type = "Task" [ 638.335860] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.347651] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924969, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.351213] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2924970, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.389430] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c20a0d-b446-404a-980d-8299647c7110 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.401134] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a086b8e2-f00a-400c-8007-69fa0888c1fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.404614] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Acquiring lock "dc548f2f-e6d6-4273-8c24-b4f52842e0d2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.404614] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Lock "dc548f2f-e6d6-4273-8c24-b4f52842e0d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.404737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Acquiring lock "dc548f2f-e6d6-4273-8c24-b4f52842e0d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.404841] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Lock "dc548f2f-e6d6-4273-8c24-b4f52842e0d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.405060] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Lock "dc548f2f-e6d6-4273-8c24-b4f52842e0d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 
0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.412032] env[69994]: INFO nova.compute.manager [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Terminating instance [ 638.447275] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf344f0-82a4-4c29-8919-6d00062ac440 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.453851] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a15c781-7f0a-4b90-af5d-e33fdc619568 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.470777] env[69994]: DEBUG nova.compute.provider_tree [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.518060] env[69994]: DEBUG oslo_vmware.api [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Task: {'id': task-2924968, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.536269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "6aacfc4e-32b4-40d7-8240-e4449cf78925" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.536503] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "6aacfc4e-32b4-40d7-8240-e4449cf78925" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.562061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36556af4-4503-4727-a60a-47689216b599 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Lock "1232f601-3339-4fc2-92b2-aa550af90b01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.750s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.578674] env[69994]: DEBUG nova.network.neutron [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Successfully created port: bffe8ff2-1bac-4992-8b93-aef1a09a525b {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 638.655200] env[69994]: 
DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "refresh_cache-298a4d59-733f-4cda-a9c2-80dc21be91ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.655547] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "refresh_cache-298a4d59-733f-4cda-a9c2-80dc21be91ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.655934] env[69994]: DEBUG nova.network.neutron [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 638.849722] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2924970, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.852882] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924969, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.913595] env[69994]: DEBUG nova.compute.manager [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 638.949041] env[69994]: DEBUG nova.virt.hardware [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 638.949041] env[69994]: DEBUG nova.virt.hardware [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 638.949041] env[69994]: DEBUG nova.virt.hardware [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 638.949224] env[69994]: DEBUG nova.virt.hardware [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 638.949224] env[69994]: DEBUG nova.virt.hardware [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 638.949224] env[69994]: DEBUG nova.virt.hardware [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 638.950370] env[69994]: DEBUG nova.virt.hardware [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 638.951767] env[69994]: DEBUG nova.virt.hardware [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 638.951767] env[69994]: DEBUG 
nova.virt.hardware [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 638.951767] env[69994]: DEBUG nova.virt.hardware [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 638.951767] env[69994]: DEBUG nova.virt.hardware [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 638.952158] env[69994]: DEBUG nova.compute.manager [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 638.952294] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 638.953177] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7430775-b281-496c-88fa-c6aa291336dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.962440] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26847de3-ce26-46d0-b001-8c149b6d2aef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.973057] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 638.973376] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1e085d0-25ba-4b99-8aaa-b5d9c528210c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.976835] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca9f66e-0b42-41de-947f-94ccf111e8f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.981449] env[69994]: DEBUG nova.scheduler.client.report [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 638.998619] env[69994]: DEBUG oslo_vmware.api [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 638.998619] env[69994]: value = "task-2924971" [ 638.998619] env[69994]: _type = "Task" [ 638.998619] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.011376] env[69994]: DEBUG oslo_vmware.api [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924971, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.020961] env[69994]: DEBUG oslo_vmware.api [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Task: {'id': task-2924968, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.065717] env[69994]: DEBUG nova.compute.manager [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 639.195206] env[69994]: DEBUG nova.compute.manager [req-0c4bb538-beff-4ea2-a8b4-2e38eb602f75 req-4b5c35ea-e918-4b5d-93f5-fca9c2f3acb7 service nova] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Received event network-vif-plugged-a81f7c8f-8499-4f74-8860-fca65590ea7b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 639.195432] env[69994]: DEBUG oslo_concurrency.lockutils [req-0c4bb538-beff-4ea2-a8b4-2e38eb602f75 req-4b5c35ea-e918-4b5d-93f5-fca9c2f3acb7 service nova] Acquiring lock "298a4d59-733f-4cda-a9c2-80dc21be91ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.195636] env[69994]: DEBUG oslo_concurrency.lockutils [req-0c4bb538-beff-4ea2-a8b4-2e38eb602f75 req-4b5c35ea-e918-4b5d-93f5-fca9c2f3acb7 service nova] Lock "298a4d59-733f-4cda-a9c2-80dc21be91ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.195791] env[69994]: DEBUG oslo_concurrency.lockutils [req-0c4bb538-beff-4ea2-a8b4-2e38eb602f75 req-4b5c35ea-e918-4b5d-93f5-fca9c2f3acb7 service nova] Lock "298a4d59-733f-4cda-a9c2-80dc21be91ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.197152] env[69994]: DEBUG nova.compute.manager [req-0c4bb538-beff-4ea2-a8b4-2e38eb602f75 req-4b5c35ea-e918-4b5d-93f5-fca9c2f3acb7 service nova] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] No waiting events found dispatching network-vif-plugged-a81f7c8f-8499-4f74-8860-fca65590ea7b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 639.197615] env[69994]: WARNING nova.compute.manager [req-0c4bb538-beff-4ea2-a8b4-2e38eb602f75 req-4b5c35ea-e918-4b5d-93f5-fca9c2f3acb7 service nova] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Received unexpected event network-vif-plugged-a81f7c8f-8499-4f74-8860-fca65590ea7b for instance with vm_state building and task_state spawning. [ 639.312009] env[69994]: DEBUG nova.network.neutron [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.356077] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924969, 'name': Rename_Task, 'duration_secs': 0.787211} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.363579] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 639.363931] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2924970, 'name': ReconfigVM_Task, 'duration_secs': 0.792392} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.364157] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7da89d66-d7fa-49c8-a3bb-8052d55d13e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.366025] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 48f6ebca-d7fe-4086-80f4-0b89789dcddb/48f6ebca-d7fe-4086-80f4-0b89789dcddb.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 639.366647] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a86b05a9-7d7a-4ef6-9040-51ce0b3000f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.373306] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Waiting for the task: (returnval){ [ 639.373306] env[69994]: value = "task-2924973" [ 639.373306] env[69994]: _type = "Task" [ 639.373306] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.377122] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 639.377122] env[69994]: value = "task-2924972" [ 639.377122] env[69994]: _type = "Task" [ 639.377122] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.391692] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2924973, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.396322] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924972, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.489454] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.610s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.490139] env[69994]: DEBUG nova.compute.manager [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 639.495170] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.449s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.510361] env[69994]: DEBUG oslo_vmware.api [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924971, 'name': PowerOffVM_Task, 'duration_secs': 0.231235} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.510361] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 639.511470] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 639.511470] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c297d9c2-cf96-45b6-b681-a56fd531a5a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.523314] env[69994]: DEBUG oslo_vmware.api [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Task: {'id': task-2924968, 'name': PowerOffVM_Task, 'duration_secs': 1.157336} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.523723] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 639.523814] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 639.524118] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f984d8b7-94ab-4e3c-98fc-4ae9e04c0ef4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.598913] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 639.599144] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 639.599272] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Deleting the datastore file [datastore1] dc548f2f-e6d6-4273-8c24-b4f52842e0d2 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 639.600430] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.600768] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d39b71d2-9df0-4c28-9759-901742c92f34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.608536] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 639.608770] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Deleting contents of the VM from 
datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 639.609173] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Deleting the datastore file [datastore1] 1d5b8fb7-eeb0-49da-acdf-53b7741e863e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 639.609173] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14c47b6e-c415-470d-a938-2f7beed5bae8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.613369] env[69994]: DEBUG oslo_vmware.api [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for the task: (returnval){ [ 639.613369] env[69994]: value = "task-2924976" [ 639.613369] env[69994]: _type = "Task" [ 639.613369] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.621291] env[69994]: DEBUG oslo_vmware.api [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Waiting for the task: (returnval){ [ 639.621291] env[69994]: value = "task-2924977" [ 639.621291] env[69994]: _type = "Task" [ 639.621291] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.628145] env[69994]: DEBUG oslo_vmware.api [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924976, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.632996] env[69994]: DEBUG oslo_vmware.api [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Task: {'id': task-2924977, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.889399] env[69994]: DEBUG nova.network.neutron [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Updating instance_info_cache with network_info: [{"id": "a81f7c8f-8499-4f74-8860-fca65590ea7b", "address": "fa:16:3e:37:df:4b", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa81f7c8f-84", "ovs_interfaceid": "a81f7c8f-8499-4f74-8860-fca65590ea7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.898247] env[69994]: DEBUG oslo_vmware.api [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924972, 'name': PowerOnVM_Task, 'duration_secs': 0.469216} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.898777] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2924973, 'name': Rename_Task, 'duration_secs': 0.138784} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.899196] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 639.900020] env[69994]: INFO nova.compute.manager [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Took 10.40 seconds to spawn the instance on the hypervisor. 
[ 639.900020] env[69994]: DEBUG nova.compute.manager [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 639.900020] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 639.901386] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213659e0-d6f0-4a7b-9d6c-d8b890fc4884 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.904465] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de5621b3-0683-4edd-903c-727a35789c7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.917033] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Waiting for the task: (returnval){ [ 639.917033] env[69994]: value = "task-2924978" [ 639.917033] env[69994]: _type = "Task" [ 639.917033] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.926604] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2924978, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.999297] env[69994]: DEBUG nova.compute.utils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 640.007048] env[69994]: DEBUG nova.compute.manager [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 640.007589] env[69994]: DEBUG nova.network.neutron [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 640.124550] env[69994]: DEBUG oslo_vmware.api [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Task: {'id': task-2924976, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.259756} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.127873] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 640.128104] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 640.128285] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 640.129014] env[69994]: INFO nova.compute.manager [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Took 1.18 seconds to destroy the instance on the hypervisor. [ 640.129014] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 640.129221] env[69994]: DEBUG nova.compute.manager [-] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 640.129276] env[69994]: DEBUG nova.network.neutron [-] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 640.136940] env[69994]: DEBUG oslo_vmware.api [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Task: {'id': task-2924977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.258656} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.138963] env[69994]: DEBUG nova.policy [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5667c67d9b6f44138d1479e901b60c74', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34469ad51e694a3389595c28ef508144', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 640.140653] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 640.140953] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 640.141275] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 640.141545] env[69994]: INFO nova.compute.manager [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Took 2.16 seconds to destroy the instance on the hypervisor. [ 640.141941] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 640.142966] env[69994]: DEBUG nova.compute.manager [-] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 640.143198] env[69994]: DEBUG nova.network.neutron [-] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 640.391949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "refresh_cache-298a4d59-733f-4cda-a9c2-80dc21be91ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.393136] env[69994]: DEBUG nova.compute.manager [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Instance network_info: |[{"id": "a81f7c8f-8499-4f74-8860-fca65590ea7b", "address": "fa:16:3e:37:df:4b", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa81f7c8f-84", "ovs_interfaceid": "a81f7c8f-8499-4f74-8860-fca65590ea7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 640.393620] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:df:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a81f7c8f-8499-4f74-8860-fca65590ea7b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.406383] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Creating folder: Project (34469ad51e694a3389595c28ef508144). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.406692] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7a70fce-73ed-4f42-bd89-e532d7714582 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.419522] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Created folder: Project (34469ad51e694a3389595c28ef508144) in parent group-v587342. [ 640.419522] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Creating folder: Instances. Parent ref: group-v587364. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.419522] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3c743db-0f9e-4aab-ad37-1062223a5b7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.431589] env[69994]: INFO nova.compute.manager [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Took 23.36 seconds to build instance. [ 640.438698] env[69994]: DEBUG oslo_vmware.api [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2924978, 'name': PowerOnVM_Task, 'duration_secs': 0.445101} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.442638] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 640.443020] env[69994]: INFO nova.compute.manager [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Took 6.49 seconds to spawn the instance on the hypervisor. [ 640.443340] env[69994]: DEBUG nova.compute.manager [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 640.444051] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Created folder: Instances in parent group-v587364. [ 640.444051] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 640.444851] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956b09f5-b107-4ffa-8b10-d711d8f57966 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.447657] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 640.448465] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4eea503c-4047-4389-8172-1230eb2498dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.476732] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.476732] env[69994]: value = "task-2924981" [ 640.476732] env[69994]: _type = "Task" [ 640.476732] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.484591] env[69994]: DEBUG nova.compute.manager [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 640.493523] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924981, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.503800] env[69994]: DEBUG nova.compute.manager [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 640.512384] env[69994]: INFO nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating resource usage from migration e07e0aa8-0c00-41d2-b1b6-07ee708e59ff [ 640.550065] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance dc548f2f-e6d6-4273-8c24-b4f52842e0d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.550760] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 1d5b8fb7-eeb0-49da-acdf-53b7741e863e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.550891] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 317e3366-4aec-4c80-bcf9-df84bc5e9939 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.551153] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 1232f601-3339-4fc2-92b2-aa550af90b01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.551273] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 316ab41e-d3c1-4cef-8d63-a138e21d0ea3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.551384] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 48f6ebca-d7fe-4086-80f4-0b89789dcddb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.551493] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 298a4d59-733f-4cda-a9c2-80dc21be91ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.551599] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 7ea91d3b-1e43-45cd-9bff-e144c63177c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.551704] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 627f89ad-0381-4de9-a429-c74e26975ce9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 640.944384] env[69994]: DEBUG oslo_concurrency.lockutils [None req-07ddaee4-9c7c-4f19-87cd-4f84a0e999ec tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Lock "316ab41e-d3c1-4cef-8d63-a138e21d0ea3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.881s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.990461] env[69994]: INFO nova.compute.manager [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Took 22.57 seconds to build instance. [ 641.020618] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924981, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.029173] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.059579] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 443382a8-64af-4f13-b7ab-11234fb13fcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.381841] env[69994]: DEBUG nova.network.neutron [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Successfully updated port: bffe8ff2-1bac-4992-8b93-aef1a09a525b {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 641.450497] env[69994]: DEBUG nova.compute.manager [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 641.500358] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924981, 'name': CreateVM_Task, 'duration_secs': 0.561489} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.504029] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 641.504029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Acquiring lock "1232f601-3339-4fc2-92b2-aa550af90b01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.504029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Lock "1232f601-3339-4fc2-92b2-aa550af90b01" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.504029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Acquiring lock "1232f601-3339-4fc2-92b2-aa550af90b01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.504337] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Lock "1232f601-3339-4fc2-92b2-aa550af90b01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.504337] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Lock "1232f601-3339-4fc2-92b2-aa550af90b01-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.505865] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9b4f7305-3ede-4bf8-bb84-7fdc0f42238a tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Lock "48f6ebca-d7fe-4086-80f4-0b89789dcddb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.098s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.506310] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.506458] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.506812] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 641.508157] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ceab325b-2771-44b8-aec7-93eb50ffc28e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.511278] env[69994]: INFO nova.compute.manager [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Terminating instance [ 641.517559] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 641.517559] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522610a8-1246-c39a-8138-d055511cac87" [ 641.517559] env[69994]: _type = "Task" [ 641.517559] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.528794] env[69994]: DEBUG nova.compute.manager [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 641.532161] env[69994]: DEBUG nova.network.neutron [-] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.546613] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522610a8-1246-c39a-8138-d055511cac87, 'name': SearchDatastore_Task, 'duration_secs': 0.01915} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.550739] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.551000] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 641.551323] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.551484] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.551725] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 641.552043] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2b7105e-fe7e-4668-a0e2-17bbffa03bc1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.565345] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 75e952e7-6761-49a4-9193-175f5d30494e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 641.568290] env[69994]: DEBUG nova.compute.manager [req-eea7111e-99dc-4eb5-9c18-467dc05a8504 req-f309c686-4982-42f1-a84f-ba377be68bc0 service nova] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Received event network-vif-plugged-bffe8ff2-1bac-4992-8b93-aef1a09a525b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 641.568290] env[69994]: DEBUG oslo_concurrency.lockutils [req-eea7111e-99dc-4eb5-9c18-467dc05a8504 req-f309c686-4982-42f1-a84f-ba377be68bc0 service nova] Acquiring lock "7ea91d3b-1e43-45cd-9bff-e144c63177c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.568290] env[69994]: DEBUG oslo_concurrency.lockutils [req-eea7111e-99dc-4eb5-9c18-467dc05a8504 req-f309c686-4982-42f1-a84f-ba377be68bc0 service nova] Lock "7ea91d3b-1e43-45cd-9bff-e144c63177c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.568290] env[69994]: DEBUG oslo_concurrency.lockutils [req-eea7111e-99dc-4eb5-9c18-467dc05a8504 req-f309c686-4982-42f1-a84f-ba377be68bc0 service nova] Lock "7ea91d3b-1e43-45cd-9bff-e144c63177c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.568290] env[69994]: DEBUG nova.compute.manager [req-eea7111e-99dc-4eb5-9c18-467dc05a8504 req-f309c686-4982-42f1-a84f-ba377be68bc0 service nova] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] No waiting events found dispatching network-vif-plugged-bffe8ff2-1bac-4992-8b93-aef1a09a525b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 641.568665] env[69994]: WARNING nova.compute.manager [req-eea7111e-99dc-4eb5-9c18-467dc05a8504 req-f309c686-4982-42f1-a84f-ba377be68bc0 service nova] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Received unexpected event network-vif-plugged-bffe8ff2-1bac-4992-8b93-aef1a09a525b for instance with vm_state building and task_state spawning. [ 641.570816] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 641.571060] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 641.574079] env[69994]: DEBUG nova.virt.hardware [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 641.574326] env[69994]: DEBUG nova.virt.hardware [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 641.574488] env[69994]: DEBUG nova.virt.hardware [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 641.574671] env[69994]: DEBUG nova.virt.hardware [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 641.574810] env[69994]: DEBUG nova.virt.hardware [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 641.574950] env[69994]: DEBUG nova.virt.hardware [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 641.575171] env[69994]: DEBUG nova.virt.hardware [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 641.575322] env[69994]: DEBUG nova.virt.hardware [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 641.575481] env[69994]: DEBUG nova.virt.hardware [None req-1b949684-5511-4f07-8596-c9ea60aeab42 
tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 641.575633] env[69994]: DEBUG nova.virt.hardware [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 641.575812] env[69994]: DEBUG nova.virt.hardware [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 641.576069] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d58577fd-fbd6-404c-9ac3-6158b4821ba2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.579990] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1312edac-f05b-4221-8cac-11a98a01fc1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.590832] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc0eb8a-e971-4fab-9da7-8d4b7400174d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.595270] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 641.595270] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5256e13a-24aa-0262-1009-a5f15c2a2681" [ 641.595270] env[69994]: _type = "Task" [ 641.595270] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.614475] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5256e13a-24aa-0262-1009-a5f15c2a2681, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.669979] env[69994]: DEBUG nova.network.neutron [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Successfully created port: f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 641.866266] env[69994]: DEBUG nova.network.neutron [-] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.885235] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Acquiring lock "refresh_cache-7ea91d3b-1e43-45cd-9bff-e144c63177c8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.885235] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Acquired lock "refresh_cache-7ea91d3b-1e43-45cd-9bff-e144c63177c8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.885235] env[69994]: DEBUG nova.network.neutron [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 641.987283] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.011590] env[69994]: DEBUG nova.compute.manager [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 642.020432] env[69994]: DEBUG nova.compute.manager [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 642.020813] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 642.022129] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412edd8f-eeed-464c-aedc-a997072c363f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.033653] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 642.035524] env[69994]: INFO nova.compute.manager [-] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Took 1.89 seconds to deallocate network for instance. [ 642.036099] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62d1cc9a-b37d-43b2-8774-696185a442d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.058261] env[69994]: DEBUG oslo_vmware.api [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Waiting for the task: (returnval){ [ 642.058261] env[69994]: value = "task-2924982" [ 642.058261] env[69994]: _type = "Task" [ 642.058261] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.076180] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 9e9973e1-feb8-4fd7-95ae-e6d824af5a64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 642.078766] env[69994]: DEBUG oslo_vmware.api [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924982, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.108441] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5256e13a-24aa-0262-1009-a5f15c2a2681, 'name': SearchDatastore_Task, 'duration_secs': 0.038536} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.108441] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4093219b-38c2-44ba-908c-6ece88073c3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.113832] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 642.113832] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c15503-afc8-5d5d-ac66-f04ea31758a5" [ 642.113832] env[69994]: _type = "Task" [ 642.113832] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.125149] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c15503-afc8-5d5d-ac66-f04ea31758a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.261764] env[69994]: DEBUG nova.compute.manager [req-241cfed8-8bc2-41a4-8982-4d4f2fe7f99c req-a20b0eda-4cb3-481b-bf5a-2cc6e0ff832d service nova] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Received event network-changed-a81f7c8f-8499-4f74-8860-fca65590ea7b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 642.262057] env[69994]: DEBUG nova.compute.manager [req-241cfed8-8bc2-41a4-8982-4d4f2fe7f99c req-a20b0eda-4cb3-481b-bf5a-2cc6e0ff832d service nova] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Refreshing instance network info cache due to event network-changed-a81f7c8f-8499-4f74-8860-fca65590ea7b. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 642.262319] env[69994]: DEBUG oslo_concurrency.lockutils [req-241cfed8-8bc2-41a4-8982-4d4f2fe7f99c req-a20b0eda-4cb3-481b-bf5a-2cc6e0ff832d service nova] Acquiring lock "refresh_cache-298a4d59-733f-4cda-a9c2-80dc21be91ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.262477] env[69994]: DEBUG oslo_concurrency.lockutils [req-241cfed8-8bc2-41a4-8982-4d4f2fe7f99c req-a20b0eda-4cb3-481b-bf5a-2cc6e0ff832d service nova] Acquired lock "refresh_cache-298a4d59-733f-4cda-a9c2-80dc21be91ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.263357] env[69994]: DEBUG nova.network.neutron [req-241cfed8-8bc2-41a4-8982-4d4f2fe7f99c req-a20b0eda-4cb3-481b-bf5a-2cc6e0ff832d service nova] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Refreshing network info cache for port a81f7c8f-8499-4f74-8860-fca65590ea7b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 642.370812] env[69994]: INFO nova.compute.manager [-] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Took 2.24 seconds to deallocate network for instance. [ 642.427556] env[69994]: DEBUG nova.network.neutron [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 642.521706] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Acquiring lock "317e3366-4aec-4c80-bcf9-df84bc5e9939" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.521989] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Lock "317e3366-4aec-4c80-bcf9-df84bc5e9939" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.523291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Acquiring lock "317e3366-4aec-4c80-bcf9-df84bc5e9939-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.523521] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Lock "317e3366-4aec-4c80-bcf9-df84bc5e9939-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.523699] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Lock "317e3366-4aec-4c80-bcf9-df84bc5e9939-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.542558] env[69994]: INFO nova.compute.manager [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Terminating instance [ 642.557373] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.560470] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.572173] env[69994]: DEBUG oslo_vmware.api [None 
req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924982, 'name': PowerOffVM_Task, 'duration_secs': 0.261487} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.572173] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 642.572353] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 642.572539] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b57f1363-532b-4bda-9473-972cd9381779 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.581121] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance f3ae584d-18a5-4bbe-b4bf-860e2332b324 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 642.630696] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c15503-afc8-5d5d-ac66-f04ea31758a5, 'name': SearchDatastore_Task, 'duration_secs': 0.01278} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.631058] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.631420] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 298a4d59-733f-4cda-a9c2-80dc21be91ca/298a4d59-733f-4cda-a9c2-80dc21be91ca.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 642.631750] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e3dd9d7-ce27-4bb3-96ff-fcacaa765cb7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.640390] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 642.640390] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 642.641680] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Deleting the datastore file [datastore1] 1232f601-3339-4fc2-92b2-aa550af90b01 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 642.642473] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebe1fb78-c914-4a2d-9ae5-8f1ee9c23a79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.645399] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 642.645399] env[69994]: value = "task-2924984" [ 642.645399] env[69994]: _type = "Task" [ 642.645399] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.656886] env[69994]: DEBUG nova.network.neutron [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Updating instance_info_cache with network_info: [{"id": "bffe8ff2-1bac-4992-8b93-aef1a09a525b", "address": "fa:16:3e:52:e1:1f", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.239", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbffe8ff2-1b", "ovs_interfaceid": "bffe8ff2-1bac-4992-8b93-aef1a09a525b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.658937] env[69994]: DEBUG oslo_vmware.api [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Waiting for the task: (returnval){ [ 642.658937] env[69994]: value = "task-2924985" [ 642.658937] env[69994]: _type = "Task" [ 642.658937] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.670998] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2924984, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.677307] env[69994]: DEBUG oslo_vmware.api [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924985, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.702938] env[69994]: DEBUG nova.compute.manager [None req-1ca1119b-7a4c-43b8-a8f3-bcb081149dad tempest-ServerDiagnosticsV248Test-70835274 tempest-ServerDiagnosticsV248Test-70835274-project-admin] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 642.703979] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96b4edf-21f7-4ccb-9841-f101098e6345 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.712017] env[69994]: INFO nova.compute.manager [None req-1ca1119b-7a4c-43b8-a8f3-bcb081149dad tempest-ServerDiagnosticsV248Test-70835274 tempest-ServerDiagnosticsV248Test-70835274-project-admin] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Retrieving diagnostics [ 642.712249] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1132ae8-b4db-4480-8b25-9dbacc11e82b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.885638] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.051483] env[69994]: DEBUG nova.compute.manager [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 643.051805] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 643.055990] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdd8470-74fb-4213-970d-e53307c1e215 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.068165] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 643.068456] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-751a4177-8068-4436-8327-6ee75a591dba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.078367] env[69994]: DEBUG oslo_vmware.api [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Waiting for the task: (returnval){ [ 643.078367] env[69994]: value = "task-2924986" [ 643.078367] env[69994]: _type = "Task" [ 643.078367] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.084415] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 803e9885-000f-4696-9fb9-03361ef46538 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.094656] env[69994]: DEBUG oslo_vmware.api [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924986, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.158858] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2924984, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.170990] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Releasing lock "refresh_cache-7ea91d3b-1e43-45cd-9bff-e144c63177c8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.170990] env[69994]: DEBUG nova.compute.manager [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Instance network_info: |[{"id": "bffe8ff2-1bac-4992-8b93-aef1a09a525b", "address": "fa:16:3e:52:e1:1f", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.239", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbffe8ff2-1b", "ovs_interfaceid": "bffe8ff2-1bac-4992-8b93-aef1a09a525b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 643.171243] env[69994]: DEBUG oslo_vmware.api [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Task: {'id': task-2924985, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154941} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.171355] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:e1:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bffe8ff2-1bac-4992-8b93-aef1a09a525b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 643.182205] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Creating folder: Project (140aca79c0564bc598abc301d4ecce26). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 643.182205] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 643.182205] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 643.182205] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 643.182205] env[69994]: INFO nova.compute.manager [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Took 1.16 seconds to destroy the instance on the hypervisor. [ 643.182515] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 643.182515] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ca52543-d66e-4edd-bf8a-681fc75f92d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.184049] env[69994]: DEBUG nova.compute.manager [-] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 643.184169] env[69994]: DEBUG nova.network.neutron [-] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 643.196822] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Created folder: Project (140aca79c0564bc598abc301d4ecce26) in parent group-v587342. [ 643.197086] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Creating folder: Instances. Parent ref: group-v587367. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 643.197373] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-91e8aa10-cff5-4e94-ae53-32f699084b27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.212635] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Created folder: Instances in parent group-v587367. [ 643.212635] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 643.212635] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 643.212635] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2d5266c-c6f1-4ef6-8052-acdbfa360b78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.233949] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 643.233949] env[69994]: value = "task-2924989" [ 643.233949] env[69994]: _type = "Task" [ 643.233949] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.248510] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924989, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.284427] env[69994]: DEBUG nova.network.neutron [req-241cfed8-8bc2-41a4-8982-4d4f2fe7f99c req-a20b0eda-4cb3-481b-bf5a-2cc6e0ff832d service nova] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Updated VIF entry in instance network info cache for port a81f7c8f-8499-4f74-8860-fca65590ea7b. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 643.284818] env[69994]: DEBUG nova.network.neutron [req-241cfed8-8bc2-41a4-8982-4d4f2fe7f99c req-a20b0eda-4cb3-481b-bf5a-2cc6e0ff832d service nova] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Updating instance_info_cache with network_info: [{"id": "a81f7c8f-8499-4f74-8860-fca65590ea7b", "address": "fa:16:3e:37:df:4b", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa81f7c8f-84", "ovs_interfaceid": "a81f7c8f-8499-4f74-8860-fca65590ea7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.588773] env[69994]: DEBUG oslo_vmware.api [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924986, 'name': PowerOffVM_Task, 'duration_secs': 0.379171} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.589219] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 643.589490] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 643.589799] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c0e3fe8-d468-468a-b97f-30b8ec8e6f31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.596031] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.659681] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2924984, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544045} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.660106] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 298a4d59-733f-4cda-a9c2-80dc21be91ca/298a4d59-733f-4cda-a9c2-80dc21be91ca.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 643.660349] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 643.660603] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79f1fe25-ce32-4c6c-901b-44f24117e1d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.669127] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 643.669127] env[69994]: value = "task-2924991" [ 643.669127] env[69994]: _type = "Task" [ 643.669127] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.676815] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 643.677972] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 643.677972] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Deleting the datastore file [datastore1] 317e3366-4aec-4c80-bcf9-df84bc5e9939 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 643.681943] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db7eb2d6-2fdd-4e03-9aba-5700055d4885 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.684338] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2924991, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.689284] env[69994]: DEBUG oslo_vmware.api [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Waiting for the task: (returnval){ [ 643.689284] env[69994]: value = "task-2924992" [ 643.689284] env[69994]: _type = "Task" [ 643.689284] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.699345] env[69994]: DEBUG oslo_vmware.api [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924992, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.753485] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924989, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.792175] env[69994]: DEBUG oslo_concurrency.lockutils [req-241cfed8-8bc2-41a4-8982-4d4f2fe7f99c req-a20b0eda-4cb3-481b-bf5a-2cc6e0ff832d service nova] Releasing lock "refresh_cache-298a4d59-733f-4cda-a9c2-80dc21be91ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.907664] env[69994]: DEBUG nova.network.neutron [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Successfully updated port: f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 644.099806] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 9717f586-cedc-4f21-9ea6-7bf6e2991327 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 644.146159] env[69994]: DEBUG nova.network.neutron [-] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.180189] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2924991, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116867} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.180473] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 644.181836] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa185550-2291-4724-af20-7a650cc730ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.211750] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 298a4d59-733f-4cda-a9c2-80dc21be91ca/298a4d59-733f-4cda-a9c2-80dc21be91ca.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 644.218992] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9431f5f-0645-4f2b-9236-b102702b9bda {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.240709] env[69994]: DEBUG oslo_vmware.api [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Task: {'id': task-2924992, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.315265} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.242892] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 644.243104] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 644.243317] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 644.243500] env[69994]: INFO nova.compute.manager [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Took 1.19 seconds to destroy the instance on the hypervisor. 
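The repeated "Waiting for the task" / "progress is N%" entries above are oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py). As a rough illustration of that calling pattern, with the vCenter endpoint, credentials and the moref value below being placeholders rather than values from this log:

    # Sketch only, not part of the log. Endpoint, credentials and the moref
    # are placeholders; only the oslo.vmware calls mirror what the log shows.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',           # placeholder endpoint
        api_retry_count=10, task_poll_interval=0.5)    # polling cadence

    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder
    # Start a long-running vSphere task, then let oslo.vmware poll it until it
    # reports success or error (the "_poll_task ... progress is N%" entries).
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)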
[ 644.243736] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 644.244128] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 644.244128] env[69994]: value = "task-2924993" [ 644.244128] env[69994]: _type = "Task" [ 644.244128] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.244367] env[69994]: DEBUG nova.compute.manager [-] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 644.244468] env[69994]: DEBUG nova.network.neutron [-] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 644.273891] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924989, 'name': CreateVM_Task, 'duration_secs': 0.671521} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.274612] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2924993, 'name': ReconfigVM_Task} progress is 14%.
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.274612] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 644.275252] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.275364] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.275672] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 644.276686] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38acdfec-9312-4f6e-9493-fce69444ecf6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.284263] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Waiting for the task: (returnval){ [ 644.284263] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524b0520-84ee-a80f-90e9-61a783659deb" [ 644.284263] env[69994]: _type = "Task" [ 644.284263] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.294954] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524b0520-84ee-a80f-90e9-61a783659deb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.345874] env[69994]: INFO nova.compute.manager [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Rebuilding instance [ 644.411524] env[69994]: DEBUG nova.compute.manager [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 644.412348] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3273564e-cbee-4138-9e61-f560be51f831 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.415819] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "refresh_cache-627f89ad-0381-4de9-a429-c74e26975ce9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.415819] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "refresh_cache-627f89ad-0381-4de9-a429-c74e26975ce9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.415959] env[69994]: DEBUG nova.network.neutron [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 644.604840] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 6aacfc4e-32b4-40d7-8240-e4449cf78925 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 644.605135] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Migration e07e0aa8-0c00-41d2-b1b6-07ee708e59ff is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 644.605135] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 63d6a59a-d58c-4179-ad39-eb9863e6f84c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 644.606025] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 644.606025] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 644.652164] env[69994]: INFO nova.compute.manager [-] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Took 1.47 seconds to deallocate network for instance. [ 644.762237] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2924993, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.799017] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524b0520-84ee-a80f-90e9-61a783659deb, 'name': SearchDatastore_Task, 'duration_secs': 0.010543} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.799201] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.799430] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 644.799685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.799790] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.799971] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 644.802804] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18a80b9e-f057-46c4-ad1e-d2c75ea1040d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.811499] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 644.811714] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 644.812634] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f84fe6dc-e7ca-4694-98a3-942985338291 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.819306] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Waiting for the task: (returnval){ [ 644.819306] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52131f1b-ec7f-ca71-86e4-3d4708f0bdf0" [ 644.819306] env[69994]: _type = "Task" [ 644.819306] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.830466] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52131f1b-ec7f-ca71-86e4-3d4708f0bdf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.987953] env[69994]: DEBUG nova.network.neutron [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 645.000614] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad14b8f-6794-4b00-8b9a-47b5827248d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.008674] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e43885-a552-45e6-983d-0f466a9bf258 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.054882] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27da7ba1-4a63-4418-b097-2cdce9087a44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.064758] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3725ea13-c920-4f0a-a76c-abacdac596dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.070016] env[69994]: DEBUG nova.network.neutron [-] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.083895] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.165930] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.267850] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2924993, 'name': ReconfigVM_Task, 'duration_secs': 0.574005} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.267850] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 298a4d59-733f-4cda-a9c2-80dc21be91ca/298a4d59-733f-4cda-a9c2-80dc21be91ca.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 645.267850] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de925403-6f59-4fbf-9baa-414a97d51947 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.276069] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 645.276069] env[69994]: value = "task-2924994" [ 645.276069] env[69994]: _type = "Task" [ 645.276069] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.287471] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2924994, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.328014] env[69994]: DEBUG nova.network.neutron [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Updating instance_info_cache with network_info: [{"id": "f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1", "address": "fa:16:3e:00:c0:a7", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1a4dbcb-4a", "ovs_interfaceid": "f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.336105] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52131f1b-ec7f-ca71-86e4-3d4708f0bdf0, 
'name': SearchDatastore_Task, 'duration_secs': 0.01686} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.337748] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75ecd4e7-5b29-4b97-af44-b516defd5bf3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.343911] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Waiting for the task: (returnval){ [ 645.343911] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b87b91-4c50-d9a9-86c0-619bb34f25f0" [ 645.343911] env[69994]: _type = "Task" [ 645.343911] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.352833] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b87b91-4c50-d9a9-86c0-619bb34f25f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.430513] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 645.430666] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-506c8a21-f280-4e21-a43a-f9c2668a5d77 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.441783] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 645.441783] env[69994]: value = "task-2924995" [ 645.441783] env[69994]: _type = "Task" [ 645.441783] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.453594] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924995, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.573030] env[69994]: INFO nova.compute.manager [-] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Took 1.33 seconds to deallocate network for instance. 
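The "Acquiring lock" / "acquired ... waited" / ""released" ... held" entries are emitted by oslo.concurrency's lockutils around named semaphores such as "compute_resources" and the per-image "[datastore2] devstack-image-cache_base/..." locks seen above. A rough sketch of the two usage forms that produce those lines, with the function bodies and exact lock names below being illustrative only:

    # Sketch only, not part of the log. The decorator and the context manager
    # are the oslo.concurrency APIs that emit the Acquiring/acquired/released
    # DEBUG lines; the bodies are placeholders.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # resource-tracker style critical section

    def search_image_cache(image_id):
        # Per-image cache lock, named like the datastore path in the log.
        with lockutils.lock('[datastore2] devstack-image-cache_base/%s' % image_id):
            pass  # e.g. SearchDatastore_Task / CopyVirtualDisk_Task work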
[ 645.586812] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 645.746250] env[69994]: DEBUG nova.compute.manager [req-85d3e3c1-bf26-4416-8dec-8efb018927a9 req-e1474f52-27ce-4ad0-a3f9-abdc121a04f5 service nova] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Received event network-vif-deleted-c87f9440-73bd-4854-863b-5e6a47bb7faf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 645.746534] env[69994]: DEBUG nova.compute.manager [req-85d3e3c1-bf26-4416-8dec-8efb018927a9 req-e1474f52-27ce-4ad0-a3f9-abdc121a04f5 service nova] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Received event network-changed-bffe8ff2-1bac-4992-8b93-aef1a09a525b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 645.746635] env[69994]: DEBUG nova.compute.manager [req-85d3e3c1-bf26-4416-8dec-8efb018927a9 req-e1474f52-27ce-4ad0-a3f9-abdc121a04f5 service nova] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Refreshing instance network info cache due to event network-changed-bffe8ff2-1bac-4992-8b93-aef1a09a525b. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 645.748020] env[69994]: DEBUG oslo_concurrency.lockutils [req-85d3e3c1-bf26-4416-8dec-8efb018927a9 req-e1474f52-27ce-4ad0-a3f9-abdc121a04f5 service nova] Acquiring lock "refresh_cache-7ea91d3b-1e43-45cd-9bff-e144c63177c8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.748020] env[69994]: DEBUG oslo_concurrency.lockutils [req-85d3e3c1-bf26-4416-8dec-8efb018927a9 req-e1474f52-27ce-4ad0-a3f9-abdc121a04f5 service nova] Acquired lock "refresh_cache-7ea91d3b-1e43-45cd-9bff-e144c63177c8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.748020] env[69994]: DEBUG nova.network.neutron [req-85d3e3c1-bf26-4416-8dec-8efb018927a9 req-e1474f52-27ce-4ad0-a3f9-abdc121a04f5 service nova] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Refreshing network info cache for port bffe8ff2-1bac-4992-8b93-aef1a09a525b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 645.786826] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2924994, 'name': Rename_Task, 'duration_secs': 0.185307} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.787272] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 645.787602] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-936f4bb6-50aa-4051-884a-89c0f7dea342 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.796232] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 645.796232] env[69994]: value = "task-2924996" [ 645.796232] env[69994]: _type = "Task" [ 645.796232] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.806460] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2924996, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.839952] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "refresh_cache-627f89ad-0381-4de9-a429-c74e26975ce9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.840430] env[69994]: DEBUG nova.compute.manager [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Instance network_info: |[{"id": "f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1", "address": "fa:16:3e:00:c0:a7", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1a4dbcb-4a", "ovs_interfaceid": "f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 645.840834] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1b949684-5511-4f07-8596-c9ea60aeab42 
tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:c0:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 645.850253] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 645.850613] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 645.855102] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed41475c-935b-43b8-95e4-c31982cb1e02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.881556] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b87b91-4c50-d9a9-86c0-619bb34f25f0, 'name': SearchDatastore_Task, 'duration_secs': 0.009292} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.882975] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.883293] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 7ea91d3b-1e43-45cd-9bff-e144c63177c8/7ea91d3b-1e43-45cd-9bff-e144c63177c8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 645.883547] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 645.883547] env[69994]: value = "task-2924997" [ 645.883547] env[69994]: _type = "Task" [ 645.883547] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.884113] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51e18b41-8f08-4fb8-bb93-d981b5b8a102 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.894669] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924997, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.896054] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Waiting for the task: (returnval){ [ 645.896054] env[69994]: value = "task-2924998" [ 645.896054] env[69994]: _type = "Task" [ 645.896054] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.904531] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2924998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.953881] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2924995, 'name': PowerOffVM_Task, 'duration_secs': 0.146763} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.954247] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 645.954635] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 645.955477] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc34762-97f5-4b5a-b367-7c3e81dc2bf7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.966115] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 645.966115] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03d635c0-551d-4f0a-94cf-d70a88c20017 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.999109] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 
tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 645.999109] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 645.999109] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Deleting the datastore file [datastore2] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 645.999498] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8fbbc11c-de21-4abb-81f1-b192a8e9a65a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.008715] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 646.008715] env[69994]: value = "task-2925000" [ 646.008715] env[69994]: _type = "Task" [ 646.008715] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.020701] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925000, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.039034] env[69994]: DEBUG nova.compute.manager [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Received event network-vif-deleted-3f5f9119-ace0-4dc6-85ae-35541cd46022 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 646.039034] env[69994]: DEBUG nova.compute.manager [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Received event network-vif-deleted-aa2b9475-c88c-41e6-be6f-249869384580 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 646.039034] env[69994]: DEBUG nova.compute.manager [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Received event network-vif-plugged-f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 646.039034] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] Acquiring lock "627f89ad-0381-4de9-a429-c74e26975ce9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.039034] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] Lock "627f89ad-0381-4de9-a429-c74e26975ce9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.039325] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] Lock "627f89ad-0381-4de9-a429-c74e26975ce9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.039325] env[69994]: DEBUG nova.compute.manager [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] No waiting events found dispatching network-vif-plugged-f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 646.039325] env[69994]: WARNING nova.compute.manager [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Received unexpected event network-vif-plugged-f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1 for instance with vm_state building and task_state spawning.
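The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entries come from oslo.service's RetryDecorator wrapping the VM-creation helper, which submits Folder.CreateVM_Task and then waits on it. A hedged sketch of that shape follows; the retry parameters, the exception retried on, and the argument names are assumptions for illustration, not Nova's actual code:

    # Sketch only, not part of the log. RetryDecorator values and the chosen
    # exception are assumptions; the CreateVM_Task submission mirrors the
    # "Invoking Folder.CreateVM_Task" entries above.
    from oslo_service import loopingcall
    from oslo_vmware import exceptions as vexc

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=5,
                                exceptions=(vexc.VimConnectionException,))
    def create_vm(session, vm_folder_ref, config_spec, res_pool_ref):
        # Submit the vSphere task, then block until it completes; the decorator
        # re-runs the call with backoff if the listed exception is raised.
        task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                                  config=config_spec, pool=res_pool_ref)
        return session.wait_for_task(task)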
[ 646.039325] env[69994]: DEBUG nova.compute.manager [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Received event network-changed-f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 646.039325] env[69994]: DEBUG nova.compute.manager [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Refreshing instance network info cache due to event network-changed-f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 646.039471] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] Acquiring lock "refresh_cache-627f89ad-0381-4de9-a429-c74e26975ce9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.039796] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] Acquired lock "refresh_cache-627f89ad-0381-4de9-a429-c74e26975ce9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 646.039937] env[69994]: DEBUG nova.network.neutron [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Refreshing network info cache for port f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 646.080153] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.092759] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 646.094105] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.599s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.094412] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.904s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.097099] env[69994]: INFO nova.compute.claims [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 646.316204] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2924996, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.378746] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Acquiring lock "70e5674d-4627-4720-9b87-955c2749e010" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.378987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Lock "70e5674d-4627-4720-9b87-955c2749e010" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.401445] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924997, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.413250] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2924998, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509245} completed successfully.
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.414039] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 7ea91d3b-1e43-45cd-9bff-e144c63177c8/7ea91d3b-1e43-45cd-9bff-e144c63177c8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 646.414369] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 646.414591] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a9b1284a-bead-4f65-aeb0-9953f24079a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.425540] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Waiting for the task: (returnval){ [ 646.425540] env[69994]: value = "task-2925001" [ 646.425540] env[69994]: _type = "Task" [ 646.425540] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.437293] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925001, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.520834] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925000, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.400129} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.520834] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 646.520834] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 646.520834] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 646.819785] env[69994]: DEBUG oslo_vmware.api [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2924996, 'name': PowerOnVM_Task, 'duration_secs': 0.732889} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.821389] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 646.821389] env[69994]: INFO nova.compute.manager [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Took 10.39 seconds to spawn the instance on the hypervisor. [ 646.821389] env[69994]: DEBUG nova.compute.manager [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 646.822241] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67e45c3-1aa5-4e14-b337-e058dda3ac9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.836769] env[69994]: DEBUG nova.network.neutron [req-85d3e3c1-bf26-4416-8dec-8efb018927a9 req-e1474f52-27ce-4ad0-a3f9-abdc121a04f5 service nova] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Updated VIF entry in instance network info cache for port bffe8ff2-1bac-4992-8b93-aef1a09a525b. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 646.836769] env[69994]: DEBUG nova.network.neutron [req-85d3e3c1-bf26-4416-8dec-8efb018927a9 req-e1474f52-27ce-4ad0-a3f9-abdc121a04f5 service nova] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Updating instance_info_cache with network_info: [{"id": "bffe8ff2-1bac-4992-8b93-aef1a09a525b", "address": "fa:16:3e:52:e1:1f", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.239", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbffe8ff2-1b", "ovs_interfaceid": "bffe8ff2-1bac-4992-8b93-aef1a09a525b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.901426] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924997, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.939286] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925001, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077178} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.940744] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 646.941857] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cfbd31-aec3-4cfc-b000-6babbff732af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.970853] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 7ea91d3b-1e43-45cd-9bff-e144c63177c8/7ea91d3b-1e43-45cd-9bff-e144c63177c8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 646.975796] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8a7832e-046a-4297-b134-f4e6681b04f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.999369] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Waiting for the task: (returnval){ [ 646.999369] env[69994]: value = "task-2925002" [ 646.999369] env[69994]: _type = "Task" [ 646.999369] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.105746] env[69994]: DEBUG nova.network.neutron [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Updated VIF entry in instance network info cache for port f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 647.106468] env[69994]: DEBUG nova.network.neutron [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Updating instance_info_cache with network_info: [{"id": "f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1", "address": "fa:16:3e:00:c0:a7", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1a4dbcb-4a", "ovs_interfaceid": "f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.342182] env[69994]: DEBUG oslo_concurrency.lockutils [req-85d3e3c1-bf26-4416-8dec-8efb018927a9 req-e1474f52-27ce-4ad0-a3f9-abdc121a04f5 service nova] Releasing lock "refresh_cache-7ea91d3b-1e43-45cd-9bff-e144c63177c8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.359783] env[69994]: INFO nova.compute.manager [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Took 25.96 seconds to build instance. [ 647.403804] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2924997, 'name': CreateVM_Task, 'duration_secs': 1.185906} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.406505] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 647.411196] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.411196] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.411196] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 647.411196] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e59c348-7719-4efd-b911-836125b17838 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.415404] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 647.415404] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52128050-ff3a-668b-a41c-6024e8ce1a28" [ 647.415404] env[69994]: _type = "Task" [ 647.415404] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.426710] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52128050-ff3a-668b-a41c-6024e8ce1a28, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.492387] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecae4c25-91d9-4ea3-83c1-85d9b7355087 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.506433] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9791e31f-d633-4743-9c4b-85909b3bb863 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.550843] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925002, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.551895] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36db968-35d3-44f7-9823-fc1f24f744f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.559874] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4e2e05-8027-4204-8885-5d916b7f7a1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.582386] env[69994]: DEBUG nova.compute.provider_tree [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.596690] env[69994]: DEBUG nova.virt.hardware [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 647.596937] env[69994]: DEBUG nova.virt.hardware [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.597112] env[69994]: DEBUG nova.virt.hardware [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Image limits 0:0:0 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 647.597302] env[69994]: DEBUG nova.virt.hardware [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.597600] env[69994]: DEBUG nova.virt.hardware [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 647.597600] env[69994]: DEBUG nova.virt.hardware [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 647.600640] env[69994]: DEBUG nova.virt.hardware [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 647.600918] env[69994]: DEBUG nova.virt.hardware [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 647.601324] env[69994]: DEBUG nova.virt.hardware [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 647.601324] env[69994]: DEBUG nova.virt.hardware [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 647.601507] env[69994]: DEBUG nova.virt.hardware [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 647.602730] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465dfe65-0cbf-4e11-b801-01abd858ee23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.611979] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1bacf66-04b8-4eef-9d63-813325ebd064 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.621612] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e24703e-0b05-4584-b203-01c6d0cab757 req-0052f0eb-8f5a-43e6-a6e3-4ff252a58f20 service nova] Releasing lock "refresh_cache-627f89ad-0381-4de9-a429-c74e26975ce9" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.645100] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 647.650778] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 647.651110] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 647.651331] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5a0e62a-8466-41f5-a8d4-0d01aff868ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.671891] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 647.671891] env[69994]: value = "task-2925003" [ 647.671891] env[69994]: _type = "Task" [ 647.671891] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.681696] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925003, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.863774] env[69994]: DEBUG oslo_concurrency.lockutils [None req-180bced5-0ffd-4812-a6fc-3686661d4540 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "298a4d59-733f-4cda-a9c2-80dc21be91ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.478s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.929961] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52128050-ff3a-668b-a41c-6024e8ce1a28, 'name': SearchDatastore_Task, 'duration_secs': 0.010846} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.930381] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.930662] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 647.930939] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.931140] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.931332] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 647.931611] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c81422e8-c1e7-40b5-ba75-b4d0510caab2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.941787] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 647.941905] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 647.943214] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-905ec321-fbce-4ca7-86d7-10c807fcdbce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.950452] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 647.950452] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5228c96d-cf2c-267e-15ef-fffd16c7d765" [ 647.950452] env[69994]: _type = "Task" [ 647.950452] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.959409] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5228c96d-cf2c-267e-15ef-fffd16c7d765, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.012646] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925002, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.088409] env[69994]: DEBUG nova.scheduler.client.report [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 648.188946] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925003, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.367057] env[69994]: DEBUG nova.compute.manager [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 648.467723] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5228c96d-cf2c-267e-15ef-fffd16c7d765, 'name': SearchDatastore_Task, 'duration_secs': 0.011118} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.468892] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75dedc3e-046b-4ce8-9d74-68cfc02dc679 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.478354] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 648.478354] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5243038e-f948-a278-9a79-960636e9bfd4" [ 648.478354] env[69994]: _type = "Task" [ 648.478354] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.494255] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5243038e-f948-a278-9a79-960636e9bfd4, 'name': SearchDatastore_Task, 'duration_secs': 0.012307} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.495018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.495018] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 627f89ad-0381-4de9-a429-c74e26975ce9/627f89ad-0381-4de9-a429-c74e26975ce9.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 648.495208] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47ed7a78-0a92-42e8-b180-b2356e683abe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.507490] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 648.507490] env[69994]: value = "task-2925004" [ 648.507490] env[69994]: _type = "Task" [ 648.507490] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.521407] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925002, 'name': ReconfigVM_Task, 'duration_secs': 1.465198} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.524885] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 7ea91d3b-1e43-45cd-9bff-e144c63177c8/7ea91d3b-1e43-45cd-9bff-e144c63177c8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 648.525619] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925004, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.526066] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-023bd440-73b2-474a-b946-468eb92ef84b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.534430] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Waiting for the task: (returnval){ [ 648.534430] env[69994]: value = "task-2925005" [ 648.534430] env[69994]: _type = "Task" [ 648.534430] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.544270] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925005, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.594939] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.499s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.594939] env[69994]: DEBUG nova.compute.manager [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 648.597543] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.382s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.599205] env[69994]: INFO nova.compute.claims [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 648.702596] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925003, 'name': CreateVM_Task, 'duration_secs': 0.989849} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.702817] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 648.703340] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.703497] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.703820] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 648.704102] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9ec7fcf-69f1-4a5e-aea7-7b9d2ede558d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.712866] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 648.712866] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d01b26-1909-dc5a-8705-91f1c2e9643d" [ 648.712866] env[69994]: _type = "Task" [ 648.712866] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.729706] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d01b26-1909-dc5a-8705-91f1c2e9643d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.908538] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.030441] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925004, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.048796] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925005, 'name': Rename_Task, 'duration_secs': 0.162799} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.049130] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 649.049410] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8b830f7-a137-4d3f-9d0b-e9979fb86c6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.058638] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Waiting for the task: (returnval){ [ 649.058638] env[69994]: value = "task-2925006" [ 649.058638] env[69994]: _type = "Task" [ 649.058638] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.072062] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925006, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.105151] env[69994]: DEBUG nova.compute.utils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 649.109290] env[69994]: DEBUG nova.compute.manager [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 649.109576] env[69994]: DEBUG nova.network.neutron [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 649.120473] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Acquiring lock "55dd32b0-e67f-4943-86e8-b9956267fedc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.120473] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Lock "55dd32b0-e67f-4943-86e8-b9956267fedc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.185656] env[69994]: DEBUG nova.compute.manager [req-aefcc4c4-5674-4edc-b9ba-8d9ba450e0f5 req-feb8b6c9-4685-4d3b-9d29-d395c68a97db service nova] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Received event network-vif-deleted-cc0658e0-6fc9-45af-9d60-534898bf6858 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 649.197974] env[69994]: DEBUG nova.policy [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3898aac480114c27a31fcb3a3e1105a8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c74711a6c2c746d6aad95a38262c88a2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 649.224113] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d01b26-1909-dc5a-8705-91f1c2e9643d, 'name': SearchDatastore_Task, 'duration_secs': 0.020054} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.224420] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.224639] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 649.224857] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.224993] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.225180] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 649.225445] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82cb1e0b-c7ab-4bf4-9480-ce3e70a178bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.235713] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 649.236522] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 649.236654] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8707fc1f-baf0-4815-be4c-aca6bdab47df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.242801] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 649.242801] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5265307e-3c72-e2e9-c970-c913728f8171" [ 649.242801] env[69994]: _type = "Task" [ 649.242801] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.257504] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5265307e-3c72-e2e9-c970-c913728f8171, 'name': SearchDatastore_Task} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.258589] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6af39584-ba17-46df-882f-e1bcf4e42cf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.272415] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 649.272415] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520b73d7-2663-7f73-01c7-a8530d908602" [ 649.272415] env[69994]: _type = "Task" [ 649.272415] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.280849] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520b73d7-2663-7f73-01c7-a8530d908602, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.482689] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f424f023-1364-4469-b3b5-6de6b57666ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.492319] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b440e4-0c91-4353-8945-d99970c1e8b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.531014] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246afe54-da5d-4e6e-86bf-1a34ad825607 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.539396] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925004, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.542645] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa2cd55-18ae-496b-9f83-8de43e90b545 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.561125] env[69994]: DEBUG nova.compute.provider_tree [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.572966] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925006, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.608706] env[69994]: DEBUG nova.compute.manager [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 649.698067] env[69994]: DEBUG nova.network.neutron [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Successfully created port: 70b00ea1-8747-4c12-8a6c-5abf6c3669f2 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 649.784106] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520b73d7-2663-7f73-01c7-a8530d908602, 'name': SearchDatastore_Task, 'duration_secs': 0.010954} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.784106] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.784422] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3/316ab41e-d3c1-4cef-8d63-a138e21d0ea3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 649.789322] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4efb80f0-ba5e-4528-933c-fac221577968 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.794696] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 649.794696] env[69994]: value = "task-2925007" [ 649.794696] env[69994]: _type = "Task" [ 649.794696] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.806096] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925007, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.044521] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925004, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.067801] env[69994]: DEBUG nova.scheduler.client.report [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 650.079707] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925006, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.313805] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925007, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.495818] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Acquiring lock "e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.496102] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Lock "e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.546181] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925004, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.541798} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.546544] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 627f89ad-0381-4de9-a429-c74e26975ce9/627f89ad-0381-4de9-a429-c74e26975ce9.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 650.546761] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 650.547037] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06138ab2-cb9d-4796-9bbf-e5590dd42559 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.557186] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 650.557186] env[69994]: value = "task-2925008" [ 650.557186] env[69994]: _type = "Task" [ 650.557186] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.575121] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.977s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.575821] env[69994]: DEBUG nova.compute.manager [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 650.579292] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925008, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.584685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.201s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.585778] env[69994]: INFO nova.compute.claims [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.589702] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925006, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.629498] env[69994]: DEBUG nova.compute.manager [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 650.667481] env[69994]: DEBUG nova.virt.hardware [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 650.667702] env[69994]: DEBUG nova.virt.hardware [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 650.667861] env[69994]: DEBUG nova.virt.hardware [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 650.668054] env[69994]: DEBUG nova.virt.hardware [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 650.668203] env[69994]: DEBUG nova.virt.hardware [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 650.668351] env[69994]: DEBUG nova.virt.hardware [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 650.668608] env[69994]: DEBUG nova.virt.hardware [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 650.668712] env[69994]: DEBUG nova.virt.hardware [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 650.668878] env[69994]: DEBUG nova.virt.hardware [None 
req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 650.669456] env[69994]: DEBUG nova.virt.hardware [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 650.669836] env[69994]: DEBUG nova.virt.hardware [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 650.671372] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb38d13d-62d1-41d7-bc5c-4e002af6c05b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.682159] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-773b570b-3a60-4ff6-9d3d-c33a8239c3ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.814021] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925007, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.800305} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.814021] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3/316ab41e-d3c1-4cef-8d63-a138e21d0ea3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 650.814021] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 650.814021] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0233d99-6a71-47b7-9439-f022d9cfe956 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.824047] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 650.824047] env[69994]: value = "task-2925009" [ 650.824047] env[69994]: _type = "Task" [ 650.824047] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.833091] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925009, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.071653] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925008, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133677} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.072762] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 651.074149] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba544282-a350-43ee-8b2c-c980a3df5328 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.084248] env[69994]: DEBUG nova.compute.utils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 651.086787] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925006, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.087546] env[69994]: DEBUG nova.compute.manager [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 651.087734] env[69994]: DEBUG nova.network.neutron [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 651.111724] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 627f89ad-0381-4de9-a429-c74e26975ce9/627f89ad-0381-4de9-a429-c74e26975ce9.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 651.112883] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecbb411f-00c5-4cd5-b239-4c5f57453501 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.140282] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 651.140282] env[69994]: value = "task-2925010" [ 651.140282] env[69994]: _type = "Task" [ 651.140282] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.154677] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925010, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.201520] env[69994]: DEBUG nova.policy [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '103c37daf3984c408ddffcbc2f7eb49d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e32425291ea4111ad7aae069c945b1c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 651.338791] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925009, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082611} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.338791] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 651.339407] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ecc574-86ec-40bc-9650-21f4dbd68cef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.373399] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3/316ab41e-d3c1-4cef-8d63-a138e21d0ea3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 651.373399] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ce0dbd1-edbb-4c74-bda7-eb195f555add {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.397093] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 651.397093] env[69994]: value = "task-2925011" [ 651.397093] env[69994]: _type = "Task" [ 651.397093] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.408853] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925011, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.573713] env[69994]: DEBUG oslo_vmware.api [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925006, 'name': PowerOnVM_Task, 'duration_secs': 2.342627} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.573992] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 651.574233] env[69994]: INFO nova.compute.manager [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Took 12.66 seconds to spawn the instance on the hypervisor. 
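The repeated "Task: {...} progress is N%" entries above come from oslo.vmware polling the vCenter task object on a fixed interval (the _poll_task frames at api.py:434) until it reports success or error. A minimal sketch of that polling pattern, using oslo.service's looping call and a hypothetical check_task() helper standing in for the real PropertyCollector read of the task state:

    # Sketch of fixed-interval task polling in the spirit of
    # oslo_vmware.api.VMwareAPISession.wait_for_task; check_task is a
    # hypothetical callable returning e.g. {'state': 'running', 'progress': 66}.
    from oslo_service import loopingcall

    def wait_for_task(check_task, poll_interval=0.5):
        def _poll():
            info = check_task()
            if info['state'] == 'success':
                # Stop the loop and hand the result back to .wait()
                raise loopingcall.LoopingCallDone(info)
            if info['state'] == 'error':
                raise RuntimeError('task failed: %s' % info.get('error'))

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=poll_interval).wait()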
[ 651.574424] env[69994]: DEBUG nova.compute.manager [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 651.575305] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bcaf36b-88ec-48a2-96ac-dc3afae9b15b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.593587] env[69994]: DEBUG nova.compute.manager [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 651.651529] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925010, 'name': ReconfigVM_Task, 'duration_secs': 0.320232} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.654339] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 627f89ad-0381-4de9-a429-c74e26975ce9/627f89ad-0381-4de9-a429-c74e26975ce9.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 651.655954] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3dafceda-1ff0-44c7-bf5e-f445d8d8fb35 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.664674] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 651.664674] env[69994]: value = "task-2925012" [ 651.664674] env[69994]: _type = "Task" [ 651.664674] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.678840] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925012, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.817501] env[69994]: DEBUG nova.network.neutron [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Successfully created port: d5b5dcfa-33de-47f7-8356-2384f6ed2083 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.839968] env[69994]: DEBUG nova.network.neutron [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Successfully updated port: 70b00ea1-8747-4c12-8a6c-5abf6c3669f2 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 651.912837] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925011, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.036625] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e3db60-dc82-4a91-b3eb-50687ecf29b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.046551] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b664f360-07f7-4f01-8613-9adf83e6f1fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.082095] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56e92e9-8c48-4fc9-9ff0-024d1c2d06c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.095916] env[69994]: INFO nova.compute.manager [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Took 29.07 seconds to build instance. [ 652.098876] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa5be0b-2c90-451c-a159-8740039b9418 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.120111] env[69994]: DEBUG nova.compute.provider_tree [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.175368] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925012, 'name': Rename_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.346218] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Acquiring lock "refresh_cache-443382a8-64af-4f13-b7ab-11234fb13fcf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.346218] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Acquired lock "refresh_cache-443382a8-64af-4f13-b7ab-11234fb13fcf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.346218] env[69994]: DEBUG nova.network.neutron [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 652.413662] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925011, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.500757] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "c512ee01-7d45-49f0-b2ce-659392527264" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.501074] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "c512ee01-7d45-49f0-b2ce-659392527264" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.598352] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fa93175b-dfac-4b58-9677-f288a66bdce8 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Lock "7ea91d3b-1e43-45cd-9bff-e144c63177c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.583s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.608612] env[69994]: DEBUG nova.compute.manager [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 652.623625] env[69994]: DEBUG nova.scheduler.client.report [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 652.645481] env[69994]: DEBUG nova.compute.manager [req-4cf394ad-9db7-49d3-8e1a-6e5c10439355 req-0e426e27-8503-450f-b677-65b79245a06f service nova] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Received event network-vif-plugged-70b00ea1-8747-4c12-8a6c-5abf6c3669f2 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 652.645735] env[69994]: DEBUG oslo_concurrency.lockutils [req-4cf394ad-9db7-49d3-8e1a-6e5c10439355 req-0e426e27-8503-450f-b677-65b79245a06f service nova] Acquiring lock "443382a8-64af-4f13-b7ab-11234fb13fcf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.645956] env[69994]: DEBUG oslo_concurrency.lockutils [req-4cf394ad-9db7-49d3-8e1a-6e5c10439355 req-0e426e27-8503-450f-b677-65b79245a06f service nova] Lock "443382a8-64af-4f13-b7ab-11234fb13fcf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.646135] env[69994]: DEBUG oslo_concurrency.lockutils [req-4cf394ad-9db7-49d3-8e1a-6e5c10439355 req-0e426e27-8503-450f-b677-65b79245a06f service nova] Lock "443382a8-64af-4f13-b7ab-11234fb13fcf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.646312] env[69994]: DEBUG nova.compute.manager [req-4cf394ad-9db7-49d3-8e1a-6e5c10439355 req-0e426e27-8503-450f-b677-65b79245a06f service nova] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] No waiting events found dispatching network-vif-plugged-70b00ea1-8747-4c12-8a6c-5abf6c3669f2 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 652.647015] env[69994]: WARNING nova.compute.manager [req-4cf394ad-9db7-49d3-8e1a-6e5c10439355 req-0e426e27-8503-450f-b677-65b79245a06f service nova] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Received unexpected event network-vif-plugged-70b00ea1-8747-4c12-8a6c-5abf6c3669f2 for instance with vm_state building and task_state spawning. 
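The "Acquiring lock" / "acquired ... waited 14.201s" / '"released" ... held 2.545s' lines in this stretch are emitted by oslo.concurrency's lockutils, which Nova uses here to serialize the resource-tracker claim ("compute_resources") and per-instance cache refreshes ("refresh_cache-<uuid>"). A minimal sketch of both forms, with a placeholder body rather than the real claim logic:

    # Sketch of the oslo.concurrency lock usage behind the "compute_resources"
    # and "refresh_cache-<uuid>" messages above; the function body is a placeholder.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid, vcpus, memory_mb):
        # Only one greenthread per worker runs this at a time, which is why the
        # log shows long "waited" times while an earlier claim holds the lock.
        return {'instance': instance_uuid, 'vcpus': vcpus, 'memory_mb': memory_mb}

    # Context-manager form, as used for the per-instance network-info cache locks:
    with lockutils.lock('refresh_cache-443382a8-64af-4f13-b7ab-11234fb13fcf'):
        pass  # rebuild the instance's network info cache while holding the lock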
[ 652.648741] env[69994]: DEBUG nova.virt.hardware [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 652.649027] env[69994]: DEBUG nova.virt.hardware [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 652.649124] env[69994]: DEBUG nova.virt.hardware [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 652.649352] env[69994]: DEBUG nova.virt.hardware [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 652.649445] env[69994]: DEBUG nova.virt.hardware [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 652.649649] env[69994]: DEBUG nova.virt.hardware [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 652.649778] env[69994]: DEBUG nova.virt.hardware [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 652.649939] env[69994]: DEBUG nova.virt.hardware [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 652.650115] env[69994]: DEBUG nova.virt.hardware [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b 
tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 652.650277] env[69994]: DEBUG nova.virt.hardware [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 652.650450] env[69994]: DEBUG nova.virt.hardware [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 652.652311] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c0cc74b-8244-464f-a493-6d27e2f6041b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.661871] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2a6ff3-9d8a-4b00-bba2-083730f65f44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.685096] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925012, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.897333] env[69994]: DEBUG nova.network.neutron [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.911302] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925011, 'name': ReconfigVM_Task, 'duration_secs': 1.056852} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.912365] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3/316ab41e-d3c1-4cef-8d63-a138e21d0ea3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 652.913644] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28bc30c4-a1d0-46a2-a1c5-47ee4c699c31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.926777] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 652.926777] env[69994]: value = "task-2925013" [ 652.926777] env[69994]: _type = "Task" [ 652.926777] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.940711] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925013, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.105512] env[69994]: DEBUG nova.compute.manager [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 653.125810] env[69994]: DEBUG nova.network.neutron [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Updating instance_info_cache with network_info: [{"id": "70b00ea1-8747-4c12-8a6c-5abf6c3669f2", "address": "fa:16:3e:49:bc:e1", "network": {"id": "299ed161-1495-41eb-8daf-65fcb8c6d076", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2112394148-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c74711a6c2c746d6aad95a38262c88a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70b00ea1-87", "ovs_interfaceid": "70b00ea1-8747-4c12-8a6c-5abf6c3669f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.129053] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.545s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.129561] env[69994]: DEBUG nova.compute.manager [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 653.132395] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.696s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.134158] env[69994]: INFO nova.compute.claims [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 653.179733] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925012, 'name': Rename_Task, 'duration_secs': 1.154019} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.181034] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 653.181308] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d839dca-57c5-49ef-a1a1-00e48c7b7a6e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.191101] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 653.191101] env[69994]: value = "task-2925014" [ 653.191101] env[69994]: _type = "Task" [ 653.191101] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.204776] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925014, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.438831] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925013, 'name': Rename_Task, 'duration_secs': 0.354749} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.439763] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 653.439763] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c42876db-a080-466f-985a-80641513a747 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.447498] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 653.447498] env[69994]: value = "task-2925015" [ 653.447498] env[69994]: _type = "Task" [ 653.447498] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.458125] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925015, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.501211] env[69994]: DEBUG nova.compute.manager [None req-9f8e7473-8ef4-4a92-a828-ff36cd5b06dd tempest-ServerExternalEventsTest-714026611 tempest-ServerExternalEventsTest-714026611-project] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Received event network-changed {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 653.501211] env[69994]: DEBUG nova.compute.manager [None req-9f8e7473-8ef4-4a92-a828-ff36cd5b06dd tempest-ServerExternalEventsTest-714026611 tempest-ServerExternalEventsTest-714026611-project] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Refreshing instance network info cache due to event network-changed. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 653.501211] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f8e7473-8ef4-4a92-a828-ff36cd5b06dd tempest-ServerExternalEventsTest-714026611 tempest-ServerExternalEventsTest-714026611-project] Acquiring lock "refresh_cache-7ea91d3b-1e43-45cd-9bff-e144c63177c8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.501211] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f8e7473-8ef4-4a92-a828-ff36cd5b06dd tempest-ServerExternalEventsTest-714026611 tempest-ServerExternalEventsTest-714026611-project] Acquired lock "refresh_cache-7ea91d3b-1e43-45cd-9bff-e144c63177c8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.501211] env[69994]: DEBUG nova.network.neutron [None req-9f8e7473-8ef4-4a92-a828-ff36cd5b06dd tempest-ServerExternalEventsTest-714026611 tempest-ServerExternalEventsTest-714026611-project] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 653.631841] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Releasing lock "refresh_cache-443382a8-64af-4f13-b7ab-11234fb13fcf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.631841] env[69994]: DEBUG nova.compute.manager [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Instance network_info: |[{"id": "70b00ea1-8747-4c12-8a6c-5abf6c3669f2", "address": "fa:16:3e:49:bc:e1", "network": {"id": "299ed161-1495-41eb-8daf-65fcb8c6d076", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2112394148-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c74711a6c2c746d6aad95a38262c88a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap70b00ea1-87", "ovs_interfaceid": "70b00ea1-8747-4c12-8a6c-5abf6c3669f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 653.632546] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:bc:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '838c9497-35dd-415e-96c7-8dc21b0cd4b3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70b00ea1-8747-4c12-8a6c-5abf6c3669f2', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 653.641903] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Creating folder: Project (c74711a6c2c746d6aad95a38262c88a2). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 653.643824] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.648068] env[69994]: DEBUG nova.compute.utils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 653.652735] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a8c44fd-bfa9-4e91-915b-641b077fde88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.659351] env[69994]: DEBUG nova.compute.manager [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 653.659785] env[69994]: DEBUG nova.network.neutron [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 653.681188] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Created folder: Project (c74711a6c2c746d6aad95a38262c88a2) in parent group-v587342. 
[ 653.681521] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Creating folder: Instances. Parent ref: group-v587372. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 653.681871] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abd44107-d2ed-403e-b817-ff191340dd3c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.698916] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Created folder: Instances in parent group-v587372. [ 653.698916] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 653.699303] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 653.699574] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5158540-a0e0-4417-b531-b8ee12726098 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.722329] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925014, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.729545] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 653.729545] env[69994]: value = "task-2925018" [ 653.729545] env[69994]: _type = "Task" [ 653.729545] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.734202] env[69994]: DEBUG nova.policy [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd0315a4332c45ccb9a9d7fec7b5c734', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a27eadbf075948a38f37e6a97f1db130', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 653.743376] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925018, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.865300] env[69994]: DEBUG nova.network.neutron [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Successfully updated port: d5b5dcfa-33de-47f7-8356-2384f6ed2083 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 653.963822] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925015, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.052354] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Acquiring lock "e87e1839-9fef-462d-b1ab-842ef76828a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.052611] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lock "e87e1839-9fef-462d-b1ab-842ef76828a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.164030] env[69994]: DEBUG nova.compute.manager [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 654.204082] env[69994]: DEBUG oslo_vmware.api [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925014, 'name': PowerOnVM_Task, 'duration_secs': 0.518817} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.204374] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 654.204571] env[69994]: INFO nova.compute.manager [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Took 12.68 seconds to spawn the instance on the hypervisor. 
[ 654.204745] env[69994]: DEBUG nova.compute.manager [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 654.205607] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c584d959-1cb2-4641-92b3-0522e49d9536 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.248587] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925018, 'name': CreateVM_Task, 'duration_secs': 0.472312} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.251230] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 654.254203] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.254203] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.254546] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 654.255372] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37f201d1-f0ad-4486-aaa3-48d6e7a0e819 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.262903] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Waiting for the task: (returnval){ [ 654.262903] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527a6369-fd56-74be-4625-4e1095a4fc09" [ 654.262903] env[69994]: _type = "Task" [ 654.262903] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.280026] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527a6369-fd56-74be-4625-4e1095a4fc09, 'name': SearchDatastore_Task, 'duration_secs': 0.0143} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.282988] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.283357] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.283670] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.283887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.284167] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 654.284775] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c58562d2-13c6-4e61-a7ac-1702d3a168ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.296858] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 654.296858] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 654.298207] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64f902cd-63e7-4c4a-92bf-31b7911ca3c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.312795] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Waiting for the task: (returnval){ [ 654.312795] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524a22f5-3c41-3332-9cf1-20d8132d9fee" [ 654.312795] env[69994]: _type = "Task" [ 654.312795] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.325515] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524a22f5-3c41-3332-9cf1-20d8132d9fee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.368351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Acquiring lock "refresh_cache-75e952e7-6761-49a4-9193-175f5d30494e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.368607] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Acquired lock "refresh_cache-75e952e7-6761-49a4-9193-175f5d30494e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.368695] env[69994]: DEBUG nova.network.neutron [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 654.463244] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925015, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.485098] env[69994]: DEBUG nova.network.neutron [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Successfully created port: 68b29b35-015e-4545-af50-70655d1914db {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 654.488949] env[69994]: DEBUG nova.network.neutron [None req-9f8e7473-8ef4-4a92-a828-ff36cd5b06dd tempest-ServerExternalEventsTest-714026611 tempest-ServerExternalEventsTest-714026611-project] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Updating instance_info_cache with network_info: [{"id": "bffe8ff2-1bac-4992-8b93-aef1a09a525b", "address": "fa:16:3e:52:e1:1f", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.239", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbffe8ff2-1b", "ovs_interfaceid": "bffe8ff2-1bac-4992-8b93-aef1a09a525b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.581306] env[69994]: DEBUG nova.compute.manager [None req-e410c47e-5443-4369-a596-d5028914b794 tempest-ServerDiagnosticsV248Test-70835274 tempest-ServerDiagnosticsV248Test-70835274-project-admin] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 654.582472] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab922bc-9526-4331-8f63-c636232a8bd5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.595210] env[69994]: INFO nova.compute.manager [None req-e410c47e-5443-4369-a596-d5028914b794 tempest-ServerDiagnosticsV248Test-70835274 tempest-ServerDiagnosticsV248Test-70835274-project-admin] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Retrieving diagnostics [ 654.598522] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84437a57-7e39-4179-8f9a-86d5c9179dc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.732704] env[69994]: INFO nova.compute.manager [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Took 30.73 seconds to build instance. 
[ 654.744955] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb3002c-955a-419d-a713-e4ab46af91c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.754961] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2698816e-db30-4aee-9037-2992c5ff73fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.794844] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778d8d6c-500c-4792-a04d-3676ee720c8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.804545] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a171da41-cc8e-4856-ac14-cc60f1586934 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.822836] env[69994]: DEBUG nova.compute.provider_tree [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.833525] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524a22f5-3c41-3332-9cf1-20d8132d9fee, 'name': SearchDatastore_Task, 'duration_secs': 0.012427} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.840619] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a1d82d5-e2fb-4817-9b59-e7af7e33d4d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.847114] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Waiting for the task: (returnval){ [ 654.847114] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c58da6-228f-e59f-8312-f734f9ca77d8" [ 654.847114] env[69994]: _type = "Task" [ 654.847114] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.857839] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c58da6-228f-e59f-8312-f734f9ca77d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.932644] env[69994]: DEBUG nova.network.neutron [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.966869] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925015, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.999746] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f8e7473-8ef4-4a92-a828-ff36cd5b06dd tempest-ServerExternalEventsTest-714026611 tempest-ServerExternalEventsTest-714026611-project] Releasing lock "refresh_cache-7ea91d3b-1e43-45cd-9bff-e144c63177c8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.118116] env[69994]: DEBUG nova.network.neutron [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Updating instance_info_cache with network_info: [{"id": "d5b5dcfa-33de-47f7-8356-2384f6ed2083", "address": "fa:16:3e:fb:05:0b", "network": {"id": "1b416d5a-e592-4170-a80b-d0406b3d7cbe", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-575332336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e32425291ea4111ad7aae069c945b1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5b5dcfa-33", "ovs_interfaceid": "d5b5dcfa-33de-47f7-8356-2384f6ed2083", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.179597] env[69994]: DEBUG nova.compute.manager [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 655.213216] env[69994]: DEBUG nova.virt.hardware [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 655.213488] env[69994]: DEBUG nova.virt.hardware [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.213646] env[69994]: DEBUG nova.virt.hardware [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 655.213844] env[69994]: DEBUG nova.virt.hardware [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.213963] env[69994]: DEBUG nova.virt.hardware [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 655.214123] env[69994]: DEBUG nova.virt.hardware [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 655.214327] env[69994]: DEBUG nova.virt.hardware [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 655.214485] env[69994]: DEBUG nova.virt.hardware [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 655.214646] env[69994]: DEBUG nova.virt.hardware [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 655.214816] env[69994]: DEBUG nova.virt.hardware [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 655.215015] env[69994]: DEBUG nova.virt.hardware [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 655.215879] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d202697-e952-49d7-9e33-b1a3e063a1da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.225153] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8252029f-2482-4acf-ade2-470059d131f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.240016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b949684-5511-4f07-8596-c9ea60aeab42 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "627f89ad-0381-4de9-a429-c74e26975ce9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.250s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.325458] env[69994]: DEBUG nova.scheduler.client.report [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 655.339529] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Acquiring lock "7ea91d3b-1e43-45cd-9bff-e144c63177c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.340215] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Lock 
"7ea91d3b-1e43-45cd-9bff-e144c63177c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.340215] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Acquiring lock "7ea91d3b-1e43-45cd-9bff-e144c63177c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.340380] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Lock "7ea91d3b-1e43-45cd-9bff-e144c63177c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.340951] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Lock "7ea91d3b-1e43-45cd-9bff-e144c63177c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.343264] env[69994]: INFO nova.compute.manager [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Terminating instance [ 655.361107] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c58da6-228f-e59f-8312-f734f9ca77d8, 'name': SearchDatastore_Task, 'duration_secs': 0.021803} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.361737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.362056] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 443382a8-64af-4f13-b7ab-11234fb13fcf/443382a8-64af-4f13-b7ab-11234fb13fcf.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 655.362357] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08687d4d-db81-44fa-8cbb-7d00d4f93916 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.371734] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Waiting for the task: (returnval){ [ 655.371734] env[69994]: value = "task-2925019" [ 655.371734] env[69994]: _type = "Task" [ 655.371734] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.383506] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925019, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.461049] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925015, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.624129] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Releasing lock "refresh_cache-75e952e7-6761-49a4-9193-175f5d30494e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.624480] env[69994]: DEBUG nova.compute.manager [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Instance network_info: |[{"id": "d5b5dcfa-33de-47f7-8356-2384f6ed2083", "address": "fa:16:3e:fb:05:0b", "network": {"id": "1b416d5a-e592-4170-a80b-d0406b3d7cbe", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-575332336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e32425291ea4111ad7aae069c945b1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5b5dcfa-33", "ovs_interfaceid": "d5b5dcfa-33de-47f7-8356-2384f6ed2083", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 655.624919] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:05:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e350f83a-f581-4e10-ac16-0b0f7bfd3d38', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd5b5dcfa-33de-47f7-8356-2384f6ed2083', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 655.635491] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Creating folder: Project (9e32425291ea4111ad7aae069c945b1c). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 655.636446] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a361564-527a-4555-b227-db998eb900fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.650699] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Created folder: Project (9e32425291ea4111ad7aae069c945b1c) in parent group-v587342. [ 655.650904] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Creating folder: Instances. Parent ref: group-v587375. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 655.651213] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-939578b2-f19d-4a5c-b068-cad5f63cf116 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.671614] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Created folder: Instances in parent group-v587375. [ 655.671901] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 655.672126] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 655.672688] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80796366-3f34-441d-bba4-c2c1964b0d97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.697521] env[69994]: DEBUG nova.compute.manager [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Received event network-changed-70b00ea1-8747-4c12-8a6c-5abf6c3669f2 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 655.697958] env[69994]: DEBUG nova.compute.manager [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Refreshing instance network info cache due to event network-changed-70b00ea1-8747-4c12-8a6c-5abf6c3669f2. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 655.697958] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] Acquiring lock "refresh_cache-443382a8-64af-4f13-b7ab-11234fb13fcf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.698137] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] Acquired lock "refresh_cache-443382a8-64af-4f13-b7ab-11234fb13fcf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.698255] env[69994]: DEBUG nova.network.neutron [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Refreshing network info cache for port 70b00ea1-8747-4c12-8a6c-5abf6c3669f2 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 655.709914] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 655.709914] env[69994]: value = "task-2925022" [ 655.709914] env[69994]: _type = "Task" [ 655.709914] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.722681] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925022, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.745511] env[69994]: DEBUG nova.compute.manager [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 655.834607] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.702s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.836180] env[69994]: DEBUG nova.compute.manager [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 655.839577] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.239s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.841206] env[69994]: INFO nova.compute.claims [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.846452] env[69994]: DEBUG nova.compute.manager [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 655.846757] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 655.847976] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88fe972-4111-464e-bf55-0d3b7c28d01f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.863982] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 655.863982] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3af97fa-e8d4-4f07-b6f2-3b3f49ec045e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.876242] env[69994]: DEBUG oslo_vmware.api [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Waiting for the task: (returnval){ [ 655.876242] env[69994]: value = "task-2925023" [ 655.876242] env[69994]: _type = "Task" [ 655.876242] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.894303] env[69994]: DEBUG oslo_vmware.api [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925023, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.894603] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925019, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.968176] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925015, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.221915] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925022, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.269093] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.347876] env[69994]: DEBUG nova.compute.utils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 656.349748] env[69994]: DEBUG nova.compute.manager [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 656.349978] env[69994]: DEBUG nova.network.neutron [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 656.393656] env[69994]: DEBUG oslo_vmware.api [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925023, 'name': PowerOffVM_Task, 'duration_secs': 0.399882} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.397746] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 656.398083] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 656.398850] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925019, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.769956} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.399169] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2752a44-e6aa-413f-8cbf-27e91cf2a208 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.400993] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 443382a8-64af-4f13-b7ab-11234fb13fcf/443382a8-64af-4f13-b7ab-11234fb13fcf.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 656.401312] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 656.404959] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2e16a0ca-3b1e-43aa-9ad0-af30ccda4c47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.427713] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Waiting for the task: (returnval){ [ 656.427713] env[69994]: value = "task-2925025" [ 656.427713] env[69994]: _type = "Task" [ 656.427713] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.442526] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925025, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.457438] env[69994]: DEBUG nova.policy [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '550fe2bfeab14f0fa409c65d98954e7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21bf4c6f3b2c45218949b0e6c1eb84fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 656.467968] env[69994]: DEBUG oslo_vmware.api [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925015, 'name': PowerOnVM_Task, 'duration_secs': 2.629868} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.468381] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 656.468639] env[69994]: DEBUG nova.compute.manager [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 656.469878] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739cfe6f-80ed-4e09-9f3e-2009e801ab1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.500761] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 656.500761] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 656.503686] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Deleting the datastore file [datastore2] 7ea91d3b-1e43-45cd-9bff-e144c63177c8 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 656.504066] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c6e3200-6637-42c7-93b8-48bbc5097638 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.511820] env[69994]: DEBUG oslo_vmware.api [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Waiting for the task: (returnval){ [ 656.511820] env[69994]: value = "task-2925026" [ 656.511820] env[69994]: _type = "Task" [ 656.511820] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.530884] env[69994]: DEBUG oslo_vmware.api [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925026, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.732562] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925022, 'name': CreateVM_Task, 'duration_secs': 0.544873} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.732757] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 656.733709] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.733902] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.734230] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 656.734507] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3ffc2a3-b948-498a-a572-d1c38057c6e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.740496] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Waiting for the task: (returnval){ [ 656.740496] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a4ddf-5c28-ad92-13ad-5271a2c25fd9" [ 656.740496] env[69994]: _type = "Task" [ 656.740496] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.755722] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a4ddf-5c28-ad92-13ad-5271a2c25fd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.862375] env[69994]: DEBUG nova.compute.manager [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 656.940609] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925025, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083864} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.941235] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 656.942367] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0bb34b-406e-4a91-9276-67e79c6b5d36 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.969540] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 443382a8-64af-4f13-b7ab-11234fb13fcf/443382a8-64af-4f13-b7ab-11234fb13fcf.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 656.973632] env[69994]: DEBUG nova.network.neutron [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Updated VIF entry in instance network info cache for port 70b00ea1-8747-4c12-8a6c-5abf6c3669f2. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 656.973891] env[69994]: DEBUG nova.network.neutron [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Updating instance_info_cache with network_info: [{"id": "70b00ea1-8747-4c12-8a6c-5abf6c3669f2", "address": "fa:16:3e:49:bc:e1", "network": {"id": "299ed161-1495-41eb-8daf-65fcb8c6d076", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2112394148-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c74711a6c2c746d6aad95a38262c88a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70b00ea1-87", "ovs_interfaceid": "70b00ea1-8747-4c12-8a6c-5abf6c3669f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.975340] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6728c29d-4d11-408c-b95a-b5e4023b3e67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.001942] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.002919] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Waiting for the task: (returnval){ [ 657.002919] env[69994]: value = "task-2925027" [ 657.002919] env[69994]: _type = "Task" [ 657.002919] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.023589] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925027, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.033258] env[69994]: DEBUG oslo_vmware.api [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925026, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.046648] env[69994]: DEBUG nova.network.neutron [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Successfully updated port: 68b29b35-015e-4545-af50-70655d1914db {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 657.131606] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Acquiring lock "48f6ebca-d7fe-4086-80f4-0b89789dcddb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.132204] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Lock "48f6ebca-d7fe-4086-80f4-0b89789dcddb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.132204] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Acquiring lock "48f6ebca-d7fe-4086-80f4-0b89789dcddb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.133018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Lock "48f6ebca-d7fe-4086-80f4-0b89789dcddb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.133018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Lock "48f6ebca-d7fe-4086-80f4-0b89789dcddb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.135386] env[69994]: INFO nova.compute.manager [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Terminating instance [ 657.222621] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "db9f7abd-ab45-49a3-9035-695b26756142" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.223094] 
env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "db9f7abd-ab45-49a3-9035-695b26756142" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.261384] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a4ddf-5c28-ad92-13ad-5271a2c25fd9, 'name': SearchDatastore_Task, 'duration_secs': 0.048834} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.261384] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "367665db-def4-4148-a316-b83378e00ba8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.261384] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "367665db-def4-4148-a316-b83378e00ba8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.261384] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.261679] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 657.261679] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.261679] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.261679] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 657.261679] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc86e501-4b77-4335-9ead-584907d0c9bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.276874] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 657.277249] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 657.278462] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6631d157-4c83-42b3-8a1a-599f57a2d61b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.288457] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Waiting for the task: (returnval){ [ 657.288457] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522ce2a1-f564-1db4-9081-001f7a891497" [ 657.288457] env[69994]: _type = "Task" [ 657.288457] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.304598] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522ce2a1-f564-1db4-9081-001f7a891497, 'name': SearchDatastore_Task} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.306351] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e73820f-011f-41a3-93c5-90ecf3bbdf7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.315598] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Waiting for the task: (returnval){ [ 657.315598] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52748ed5-4501-3dec-66f5-655c1dc448ca" [ 657.315598] env[69994]: _type = "Task" [ 657.315598] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.329837] env[69994]: DEBUG nova.network.neutron [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Successfully created port: 3e918786-01b7-4a7e-a884-720a3c170676 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 657.336328] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52748ed5-4501-3dec-66f5-655c1dc448ca, 'name': SearchDatastore_Task, 'duration_secs': 0.013675} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.337489] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.340506] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 75e952e7-6761-49a4-9193-175f5d30494e/75e952e7-6761-49a4-9193-175f5d30494e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 657.340506] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-002d15ea-b6a2-44a0-a571-464f0dd4bdda {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.349181] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Waiting for the task: (returnval){ [ 657.349181] env[69994]: value = "task-2925028" [ 657.349181] env[69994]: _type = "Task" [ 657.349181] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.358601] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925028, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.441263] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adfc325-fe09-4a57-ac25-f7fa9404e617 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.449767] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b37999-8635-4f32-b0d9-7a015817f376 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.482779] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d5fffb-d8f2-424d-8b2c-5ba53a3f6e63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.491146] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1bcff6-a17e-4d88-b6e1-566e5d78f97f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.495706] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] Releasing lock "refresh_cache-443382a8-64af-4f13-b7ab-11234fb13fcf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.496144] env[69994]: DEBUG nova.compute.manager [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Received event network-vif-plugged-d5b5dcfa-33de-47f7-8356-2384f6ed2083 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 657.497171] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] Acquiring lock "75e952e7-6761-49a4-9193-175f5d30494e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.497171] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] Lock "75e952e7-6761-49a4-9193-175f5d30494e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.497171] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] Lock "75e952e7-6761-49a4-9193-175f5d30494e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.497171] env[69994]: DEBUG nova.compute.manager [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] No waiting events found dispatching network-vif-plugged-d5b5dcfa-33de-47f7-8356-2384f6ed2083 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 657.497171] env[69994]: WARNING nova.compute.manager 
[req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Received unexpected event network-vif-plugged-d5b5dcfa-33de-47f7-8356-2384f6ed2083 for instance with vm_state building and task_state spawning. [ 657.497477] env[69994]: DEBUG nova.compute.manager [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Received event network-changed-d5b5dcfa-33de-47f7-8356-2384f6ed2083 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 657.498178] env[69994]: DEBUG nova.compute.manager [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Refreshing instance network info cache due to event network-changed-d5b5dcfa-33de-47f7-8356-2384f6ed2083. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 657.498178] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] Acquiring lock "refresh_cache-75e952e7-6761-49a4-9193-175f5d30494e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.498178] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] Acquired lock "refresh_cache-75e952e7-6761-49a4-9193-175f5d30494e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.498375] env[69994]: DEBUG nova.network.neutron [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Refreshing network info cache for port d5b5dcfa-33de-47f7-8356-2384f6ed2083 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 657.513680] env[69994]: DEBUG nova.compute.provider_tree [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.529845] env[69994]: DEBUG oslo_vmware.api [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Task: {'id': task-2925026, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.755152} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.534325] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 657.534626] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 657.534922] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 657.534968] env[69994]: INFO nova.compute.manager [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Took 1.69 seconds to destroy the instance on the hypervisor. [ 657.535238] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 657.536414] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925027, 'name': ReconfigVM_Task, 'duration_secs': 0.44283} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.536414] env[69994]: DEBUG nova.compute.manager [-] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 657.538110] env[69994]: DEBUG nova.network.neutron [-] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 657.538281] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 443382a8-64af-4f13-b7ab-11234fb13fcf/443382a8-64af-4f13-b7ab-11234fb13fcf.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 657.539150] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-310c7e01-c9fd-4bdd-8b3d-3b1ca8a6f3ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.548954] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Waiting for the task: (returnval){ [ 657.548954] env[69994]: value = "task-2925029" [ 657.548954] env[69994]: _type = "Task" [ 657.548954] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.555252] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Acquiring lock "refresh_cache-9e9973e1-feb8-4fd7-95ae-e6d824af5a64" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.555252] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Acquired lock "refresh_cache-9e9973e1-feb8-4fd7-95ae-e6d824af5a64" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.555347] env[69994]: DEBUG nova.network.neutron [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 657.564146] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925029, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.640582] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Acquiring lock "refresh_cache-48f6ebca-d7fe-4086-80f4-0b89789dcddb" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.640778] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Acquired lock "refresh_cache-48f6ebca-d7fe-4086-80f4-0b89789dcddb" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.640957] env[69994]: DEBUG nova.network.neutron [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 657.862490] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925028, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.879937] env[69994]: DEBUG nova.compute.manager [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 657.912783] env[69994]: DEBUG nova.virt.hardware [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 657.913165] env[69994]: DEBUG nova.virt.hardware [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 657.913236] env[69994]: DEBUG nova.virt.hardware [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 657.913448] env[69994]: DEBUG nova.virt.hardware [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 657.914588] env[69994]: DEBUG nova.virt.hardware [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 657.914588] env[69994]: DEBUG nova.virt.hardware [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 657.914588] env[69994]: DEBUG nova.virt.hardware [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 657.914588] env[69994]: DEBUG nova.virt.hardware [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 657.914588] env[69994]: DEBUG nova.virt.hardware [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Got 1 possible 
topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 657.914890] env[69994]: DEBUG nova.virt.hardware [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 657.914890] env[69994]: DEBUG nova.virt.hardware [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 657.915561] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0593dad7-856f-451a-9e4e-8d946957a708 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.928912] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef386363-b457-4386-a3b5-0472a52f212b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.024717] env[69994]: DEBUG nova.scheduler.client.report [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 658.065577] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925029, 'name': Rename_Task, 'duration_secs': 0.304683} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.068257] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 658.068850] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f683fcbe-1a4c-4050-a047-fd2efaea0f64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.081858] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Waiting for the task: (returnval){ [ 658.081858] env[69994]: value = "task-2925030" [ 658.081858] env[69994]: _type = "Task" [ 658.081858] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.091841] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925030, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.139085] env[69994]: DEBUG nova.network.neutron [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.171879] env[69994]: DEBUG nova.network.neutron [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.283877] env[69994]: DEBUG nova.network.neutron [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.359772] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925028, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577312} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.360407] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 75e952e7-6761-49a4-9193-175f5d30494e/75e952e7-6761-49a4-9193-175f5d30494e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 658.360594] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 658.360928] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba78793e-344e-4dad-be52-6156d987f949 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.375405] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Waiting for the task: (returnval){ [ 658.375405] env[69994]: value = "task-2925031" [ 658.375405] env[69994]: _type = "Task" [ 658.375405] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.395949] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925031, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.530103] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.530663] env[69994]: DEBUG nova.compute.manager [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 658.535180] env[69994]: DEBUG nova.network.neutron [-] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.536263] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 17.508s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.576610] env[69994]: DEBUG nova.network.neutron [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Updating instance_info_cache with network_info: [{"id": "68b29b35-015e-4545-af50-70655d1914db", "address": "fa:16:3e:fb:a3:d3", "network": {"id": "51754dc8-5636-4471-80e9-79d9743ed5d8", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-735809087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a27eadbf075948a38f37e6a97f1db130", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68b29b35-01", "ovs_interfaceid": "68b29b35-015e-4545-af50-70655d1914db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.590343] env[69994]: DEBUG nova.network.neutron [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Updated VIF entry in instance network info cache for port d5b5dcfa-33de-47f7-8356-2384f6ed2083. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 658.590343] env[69994]: DEBUG nova.network.neutron [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Updating instance_info_cache with network_info: [{"id": "d5b5dcfa-33de-47f7-8356-2384f6ed2083", "address": "fa:16:3e:fb:05:0b", "network": {"id": "1b416d5a-e592-4170-a80b-d0406b3d7cbe", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-575332336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e32425291ea4111ad7aae069c945b1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5b5dcfa-33", "ovs_interfaceid": "d5b5dcfa-33de-47f7-8356-2384f6ed2083", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.598592] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925030, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.788837] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Releasing lock "refresh_cache-48f6ebca-d7fe-4086-80f4-0b89789dcddb" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 658.790234] env[69994]: DEBUG nova.compute.manager [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 658.790234] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 658.790856] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676a0a78-aa64-44de-a1af-5f0aec74957c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.801550] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 658.801887] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7568eb26-40ff-4eda-b98e-6e7301d0300d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.812980] env[69994]: DEBUG oslo_vmware.api [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Waiting for the task: (returnval){ [ 658.812980] env[69994]: value = "task-2925032" [ 658.812980] env[69994]: _type = "Task" [ 658.812980] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.827659] env[69994]: DEBUG oslo_vmware.api [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2925032, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.846552] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquiring lock "f36c29d1-b945-4afe-abbd-431e59de7cec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.846552] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Lock "f36c29d1-b945-4afe-abbd-431e59de7cec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.888776] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925031, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135586} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.889241] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 658.890164] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f36c8bc-107b-4155-95f6-1ebb32bb8bb6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.921634] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 75e952e7-6761-49a4-9193-175f5d30494e/75e952e7-6761-49a4-9193-175f5d30494e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 658.922356] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1594907d-bf3e-4576-b958-99d9121b9281 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.946652] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Waiting for the task: (returnval){ [ 658.946652] env[69994]: value = "task-2925033" [ 658.946652] env[69994]: _type = "Task" [ 658.946652] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.956889] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925033, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.037126] env[69994]: DEBUG nova.compute.utils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 659.038576] env[69994]: DEBUG nova.compute.manager [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 659.038721] env[69994]: DEBUG nova.network.neutron [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 659.041184] env[69994]: INFO nova.compute.manager [-] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Took 1.50 seconds to deallocate network for instance. [ 659.045167] env[69994]: INFO nova.compute.claims [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 659.080280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Releasing lock "refresh_cache-9e9973e1-feb8-4fd7-95ae-e6d824af5a64" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.080634] env[69994]: DEBUG nova.compute.manager [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Instance network_info: |[{"id": "68b29b35-015e-4545-af50-70655d1914db", "address": "fa:16:3e:fb:a3:d3", "network": {"id": "51754dc8-5636-4471-80e9-79d9743ed5d8", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-735809087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a27eadbf075948a38f37e6a97f1db130", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68b29b35-01", "ovs_interfaceid": "68b29b35-015e-4545-af50-70655d1914db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 659.081065] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:a3:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69054a13-b7ef-44e1-bd3b-3ca5ba602848', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68b29b35-015e-4545-af50-70655d1914db', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 
659.092764] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Creating folder: Project (a27eadbf075948a38f37e6a97f1db130). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.094505] env[69994]: DEBUG nova.policy [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c05ace0df7fe4a72bb3045dcb50fdfe2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a9a6d7e114941d5a384d9907b491335', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 659.098453] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0d60988-7cf6-4200-9af2-696db37968f3 req-9b59509d-f540-49ce-a356-1b1486c3eba5 service nova] Releasing lock "refresh_cache-75e952e7-6761-49a4-9193-175f5d30494e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.098883] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-16aea5ed-b277-4afc-8205-d67c917d192e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.118306] env[69994]: DEBUG oslo_vmware.api [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925030, 'name': PowerOnVM_Task, 'duration_secs': 0.716608} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.120435] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 659.120536] env[69994]: INFO nova.compute.manager [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Took 8.49 seconds to spawn the instance on the hypervisor. [ 659.120755] env[69994]: DEBUG nova.compute.manager [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 659.120995] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Created folder: Project (a27eadbf075948a38f37e6a97f1db130) in parent group-v587342. 
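The two Folder.CreateFolder calls in the records above build the vCenter inventory path for the new instance: a per-project folder named after the tenant ID (under parent group-v587342), then an Instances folder beneath it, with either folder reused if it already exists. A minimal, self-contained sketch of that create-or-reuse pattern, using stub objects (FakeFolder, create_child_folder) rather than the real vSphere bindings or the driver's own code:

    # Illustrative stub: models the create-folder-if-missing step seen in the
    # log (Project (<tenant-id>) -> Instances). Not the actual driver code.

    class FakeFolder:
        def __init__(self, name):
            self.name = name
            self.children = {}

    def create_child_folder(parent, name):
        """Return the child folder named `name`, creating it only if absent."""
        if name in parent.children:
            return parent.children[name]
        child = FakeFolder(name)
        parent.children[name] = child
        return child

    if __name__ == "__main__":
        root = FakeFolder("group-v587342")  # parent ref taken from the log
        project = create_child_folder(root, "Project (a27eadbf075948a38f37e6a97f1db130)")
        instances = create_child_folder(project, "Instances")
        print(project.name, "->", instances.name)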
[ 659.121204] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Creating folder: Instances. Parent ref: group-v587378. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.122597] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a920490-7df1-490d-a081-277f9ed1641c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.125257] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ace0c443-677f-4fea-8c40-1119f5450f32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.138126] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Created folder: Instances in parent group-v587378. [ 659.138454] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 659.138622] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 659.138803] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-047adc9f-5495-449f-a22c-fa002c13cca8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.165853] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 659.165853] env[69994]: value = "task-2925036" [ 659.165853] env[69994]: _type = "Task" [ 659.165853] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.176444] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925036, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.320374] env[69994]: DEBUG nova.compute.manager [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Received event network-vif-plugged-68b29b35-015e-4545-af50-70655d1914db {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 659.320776] env[69994]: DEBUG oslo_concurrency.lockutils [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] Acquiring lock "9e9973e1-feb8-4fd7-95ae-e6d824af5a64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.320851] env[69994]: DEBUG oslo_concurrency.lockutils [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] Lock "9e9973e1-feb8-4fd7-95ae-e6d824af5a64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.320995] env[69994]: DEBUG oslo_concurrency.lockutils [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] Lock "9e9973e1-feb8-4fd7-95ae-e6d824af5a64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.321179] env[69994]: DEBUG nova.compute.manager [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] No waiting events found dispatching network-vif-plugged-68b29b35-015e-4545-af50-70655d1914db {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 659.321375] env[69994]: WARNING nova.compute.manager [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Received unexpected event network-vif-plugged-68b29b35-015e-4545-af50-70655d1914db for instance with vm_state building and task_state spawning. [ 659.321587] env[69994]: DEBUG nova.compute.manager [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Received event network-changed-68b29b35-015e-4545-af50-70655d1914db {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 659.321773] env[69994]: DEBUG nova.compute.manager [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Refreshing instance network info cache due to event network-changed-68b29b35-015e-4545-af50-70655d1914db. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 659.321989] env[69994]: DEBUG oslo_concurrency.lockutils [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] Acquiring lock "refresh_cache-9e9973e1-feb8-4fd7-95ae-e6d824af5a64" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.322156] env[69994]: DEBUG oslo_concurrency.lockutils [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] Acquired lock "refresh_cache-9e9973e1-feb8-4fd7-95ae-e6d824af5a64" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.322336] env[69994]: DEBUG nova.network.neutron [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Refreshing network info cache for port 68b29b35-015e-4545-af50-70655d1914db {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 659.338270] env[69994]: DEBUG oslo_vmware.api [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2925032, 'name': PowerOffVM_Task, 'duration_secs': 0.226143} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.338270] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 659.338454] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 659.338733] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6900502-df86-4235-aa21-caaa028f847d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.376730] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 659.377291] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 659.377705] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Deleting the datastore file [datastore2] 48f6ebca-d7fe-4086-80f4-0b89789dcddb {{(pid=69994) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 659.378159] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f70d170-8dde-410d-a5a0-43204f346eb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.387417] env[69994]: DEBUG oslo_vmware.api [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Waiting for the task: (returnval){ [ 659.387417] env[69994]: value = "task-2925038" [ 659.387417] env[69994]: _type = "Task" [ 659.387417] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.401870] env[69994]: DEBUG oslo_vmware.api [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2925038, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.464214] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925033, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.505919] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "2358d8f6-7fbc-4f30-93ad-27f4d96aefa7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.506200] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "2358d8f6-7fbc-4f30-93ad-27f4d96aefa7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.551297] env[69994]: DEBUG nova.network.neutron [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Successfully created port: c2407183-ab55-4108-a4c9-1fb48e727a35 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.551491] env[69994]: DEBUG nova.compute.manager [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 659.556150] env[69994]: INFO nova.compute.resource_tracker [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating resource usage from migration e07e0aa8-0c00-41d2-b1b6-07ee708e59ff [ 659.560540] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.644718] env[69994]: INFO nova.compute.manager [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Took 25.50 seconds to build instance. [ 659.679399] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925036, 'name': CreateVM_Task, 'duration_secs': 0.496684} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.679742] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 659.680748] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.681108] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.681288] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 659.685328] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b25f9c1b-911c-44e6-ae21-0986845ab7d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.688039] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Waiting for the task: (returnval){ [ 659.688039] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526abd00-34a0-06f5-c335-593a964b017c" [ 659.688039] env[69994]: _type = "Task" [ 659.688039] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.701621] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526abd00-34a0-06f5-c335-593a964b017c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.753412] env[69994]: DEBUG nova.network.neutron [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Successfully updated port: 3e918786-01b7-4a7e-a884-720a3c170676 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 659.826481] env[69994]: INFO nova.compute.manager [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Rebuilding instance [ 659.883144] env[69994]: DEBUG nova.compute.manager [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 659.884426] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d68467-44ec-414b-a7ec-acc475437aa2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.903707] env[69994]: DEBUG oslo_vmware.api [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Task: {'id': task-2925038, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135489} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.906637] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 659.906818] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 659.907207] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 659.907268] env[69994]: INFO nova.compute.manager [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Took 1.12 seconds to destroy the instance on the hypervisor. 
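Read together, the req-2a950d7c records above trace the teardown order for instance 48f6ebca-d7fe-4086-80f4-0b89789dcddb: power the VM off, unregister it, delete its files from datastore2, then hand off to network deallocation. The sketch below only mirrors that ordering with caller-supplied placeholder steps; it is not Nova's _destroy_instance implementation, and the continue-on-error behaviour is an assumption for illustration.

    # Rough outline of the teardown order visible in the log. Each step is a
    # placeholder callable; a failing step is logged and the remaining steps
    # still run.

    import logging

    LOG = logging.getLogger(__name__)

    def destroy_instance(power_off, unregister, delete_datastore_files,
                         deallocate_network):
        """Run the teardown steps in the order seen in the log."""
        steps = [("power off", power_off),
                 ("unregister", unregister),
                 ("delete datastore files", delete_datastore_files),
                 ("deallocate network", deallocate_network)]
        for name, step in steps:
            try:
                step()
                LOG.debug("%s: done", name)
            except Exception:
                LOG.exception("%s failed, continuing teardown", name)

    if __name__ == "__main__":
        logging.basicConfig(level=logging.DEBUG)
        destroy_instance(lambda: None, lambda: None, lambda: None, lambda: None)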
[ 659.907589] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 659.908171] env[69994]: DEBUG nova.compute.manager [-] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 659.908377] env[69994]: DEBUG nova.network.neutron [-] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 659.928677] env[69994]: DEBUG nova.network.neutron [-] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 659.961788] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925033, 'name': ReconfigVM_Task, 'duration_secs': 0.526524} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.964476] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 75e952e7-6761-49a4-9193-175f5d30494e/75e952e7-6761-49a4-9193-175f5d30494e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 659.965308] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e574bd9d-3f5b-4192-82dc-8c28f3df3513 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.974784] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Waiting for the task: (returnval){ [ 659.974784] env[69994]: value = "task-2925039" [ 659.974784] env[69994]: _type = "Task" [ 659.974784] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.987341] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925039, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.053944] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae1c0b4-e4af-4a29-b2c7-cc8eff609c3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.066188] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128a1447-3054-4cb2-b95a-9e7c2557d83d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.105539] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdd4ae8-b74b-4e59-9847-a09a166b5250 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.120386] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd262f5-4a0f-4f81-998f-5031e88eeb88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.138042] env[69994]: DEBUG nova.compute.provider_tree [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.147353] env[69994]: DEBUG oslo_concurrency.lockutils [None req-731048fb-816c-4f0c-8c2f-c5b685220b52 tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Lock "443382a8-64af-4f13-b7ab-11234fb13fcf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.713s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.202866] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526abd00-34a0-06f5-c335-593a964b017c, 'name': SearchDatastore_Task, 'duration_secs': 0.030437} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.203268] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.203562] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 660.203820] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.203966] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.204162] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 660.204424] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4788a3c0-3637-49bc-8552-ced58cb066d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.216109] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 660.216320] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 660.217070] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90c082fe-f9f5-4c61-9e8a-57d20cb8301e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.223072] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Waiting for the task: (returnval){ [ 660.223072] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a0a1f-7f75-bdef-ddbe-3078dc52bf17" [ 660.223072] env[69994]: _type = "Task" [ 660.223072] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.234340] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a0a1f-7f75-bdef-ddbe-3078dc52bf17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.257207] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "refresh_cache-f3ae584d-18a5-4bbe-b4bf-860e2332b324" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.257207] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "refresh_cache-f3ae584d-18a5-4bbe-b4bf-860e2332b324" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.257207] env[69994]: DEBUG nova.network.neutron [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 660.260184] env[69994]: DEBUG nova.network.neutron [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Updated VIF entry in instance network info cache for port 68b29b35-015e-4545-af50-70655d1914db. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 660.260466] env[69994]: DEBUG nova.network.neutron [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Updating instance_info_cache with network_info: [{"id": "68b29b35-015e-4545-af50-70655d1914db", "address": "fa:16:3e:fb:a3:d3", "network": {"id": "51754dc8-5636-4471-80e9-79d9743ed5d8", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-735809087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a27eadbf075948a38f37e6a97f1db130", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68b29b35-01", "ovs_interfaceid": "68b29b35-015e-4545-af50-70655d1914db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.431396] env[69994]: DEBUG nova.network.neutron [-] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.490011] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925039, 'name': Rename_Task, 'duration_secs': 0.236666} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.490011] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 660.490011] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2aa2a665-237f-4e7a-9e3c-4fba070b7a85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.496158] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Waiting for the task: (returnval){ [ 660.496158] env[69994]: value = "task-2925040" [ 660.496158] env[69994]: _type = "Task" [ 660.496158] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.511602] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925040, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.567099] env[69994]: DEBUG nova.compute.manager [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 660.612354] env[69994]: DEBUG nova.virt.hardware [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 660.612595] env[69994]: DEBUG nova.virt.hardware [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.612837] env[69994]: DEBUG nova.virt.hardware [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 660.613085] env[69994]: DEBUG nova.virt.hardware [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.613246] env[69994]: DEBUG nova.virt.hardware [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 660.614035] env[69994]: DEBUG nova.virt.hardware [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 660.614035] env[69994]: DEBUG nova.virt.hardware [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 660.614035] env[69994]: DEBUG nova.virt.hardware [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 660.614035] env[69994]: DEBUG nova.virt.hardware [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 660.614299] env[69994]: DEBUG nova.virt.hardware [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 660.614339] env[69994]: DEBUG nova.virt.hardware [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 660.615789] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc2b9f0-6550-4f42-8b71-6118f3c3e21c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.620946] env[69994]: DEBUG nova.compute.manager [req-b46b14d0-6292-4a6c-a860-2a62c6a4a258 req-ee1c2ccf-57ce-4418-a646-270b0ce02318 service nova] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Received event network-vif-plugged-3e918786-01b7-4a7e-a884-720a3c170676 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 660.620946] env[69994]: DEBUG oslo_concurrency.lockutils [req-b46b14d0-6292-4a6c-a860-2a62c6a4a258 req-ee1c2ccf-57ce-4418-a646-270b0ce02318 service nova] Acquiring lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.621284] env[69994]: DEBUG oslo_concurrency.lockutils [req-b46b14d0-6292-4a6c-a860-2a62c6a4a258 req-ee1c2ccf-57ce-4418-a646-270b0ce02318 service nova] Lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.621284] env[69994]: DEBUG oslo_concurrency.lockutils [req-b46b14d0-6292-4a6c-a860-2a62c6a4a258 req-ee1c2ccf-57ce-4418-a646-270b0ce02318 service nova] Lock 
"f3ae584d-18a5-4bbe-b4bf-860e2332b324-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.622101] env[69994]: DEBUG nova.compute.manager [req-b46b14d0-6292-4a6c-a860-2a62c6a4a258 req-ee1c2ccf-57ce-4418-a646-270b0ce02318 service nova] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] No waiting events found dispatching network-vif-plugged-3e918786-01b7-4a7e-a884-720a3c170676 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 660.622101] env[69994]: WARNING nova.compute.manager [req-b46b14d0-6292-4a6c-a860-2a62c6a4a258 req-ee1c2ccf-57ce-4418-a646-270b0ce02318 service nova] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Received unexpected event network-vif-plugged-3e918786-01b7-4a7e-a884-720a3c170676 for instance with vm_state building and task_state spawning. [ 660.642050] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7ae31d-da74-43da-9d1e-365f680959a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.647745] env[69994]: DEBUG nova.scheduler.client.report [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 660.650997] env[69994]: DEBUG nova.compute.manager [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 660.739373] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a0a1f-7f75-bdef-ddbe-3078dc52bf17, 'name': SearchDatastore_Task, 'duration_secs': 0.043656} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.740509] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-969b291d-99c9-45c8-b009-4fcd014f7d92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.750142] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Waiting for the task: (returnval){ [ 660.750142] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b9add4-a8f3-386d-265a-994f11711ec1" [ 660.750142] env[69994]: _type = "Task" [ 660.750142] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.760465] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b9add4-a8f3-386d-265a-994f11711ec1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.764885] env[69994]: DEBUG oslo_concurrency.lockutils [req-22f9f092-9952-4c67-978f-24dcda7e607a req-b2b1cb6e-e92b-465c-ad14-9ec4cd1f4252 service nova] Releasing lock "refresh_cache-9e9973e1-feb8-4fd7-95ae-e6d824af5a64" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.819714] env[69994]: DEBUG nova.network.neutron [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.905374] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 660.905616] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cfa418a6-02e8-44c0-9647-9c41270dc2ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.915759] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Waiting for the task: (returnval){ [ 660.915759] env[69994]: value = "task-2925041" [ 660.915759] env[69994]: _type = "Task" [ 660.915759] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.927322] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925041, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.935368] env[69994]: INFO nova.compute.manager [-] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Took 1.03 seconds to deallocate network for instance. [ 661.009129] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925040, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.023308] env[69994]: DEBUG nova.network.neutron [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Updating instance_info_cache with network_info: [{"id": "3e918786-01b7-4a7e-a884-720a3c170676", "address": "fa:16:3e:42:46:7f", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e918786-01", "ovs_interfaceid": "3e918786-01b7-4a7e-a884-720a3c170676", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.159985] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.624s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.160296] env[69994]: INFO nova.compute.manager [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Migrating [ 661.160598] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.160744] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "compute-rpcapi-router" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.164539] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.178s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.166165] env[69994]: INFO nova.compute.claims [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e 
tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 661.170395] env[69994]: INFO nova.compute.rpcapi [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 661.171659] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "compute-rpcapi-router" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.205879] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.266408] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b9add4-a8f3-386d-265a-994f11711ec1, 'name': SearchDatastore_Task, 'duration_secs': 0.023324} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.266646] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.266934] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 9e9973e1-feb8-4fd7-95ae-e6d824af5a64/9e9973e1-feb8-4fd7-95ae-e6d824af5a64.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 661.267349] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-409474cd-43a8-45cf-ae10-b0bb1860012f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.282522] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Waiting for the task: (returnval){ [ 661.282522] env[69994]: value = "task-2925042" [ 661.282522] env[69994]: _type = "Task" [ 661.282522] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.292447] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925042, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.431633] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925041, 'name': PowerOffVM_Task, 'duration_secs': 0.142593} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.432898] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 661.432898] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 661.433533] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8c59ff-459b-4d89-925d-741b2dc102cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.443422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.443754] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 661.444851] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09f6b987-963d-4465-ae36-4943ee009bd7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.478944] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 661.480606] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 661.480606] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Deleting the datastore file [datastore1] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 661.480606] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae52354e-1d6c-49eb-b0c3-89ea889f906c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.492292] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Waiting for the task: (returnval){ [ 661.492292] env[69994]: value = "task-2925044" [ 661.492292] env[69994]: _type = "Task" [ 661.492292] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.507372] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925044, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.516821] env[69994]: DEBUG oslo_vmware.api [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925040, 'name': PowerOnVM_Task, 'duration_secs': 0.90088} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.517340] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 661.517806] env[69994]: INFO nova.compute.manager [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Took 8.91 seconds to spawn the instance on the hypervisor. 
[ 661.518239] env[69994]: DEBUG nova.compute.manager [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 661.519314] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d291be0f-eef9-4880-8e92-b9cd1631ea47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.527463] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "refresh_cache-f3ae584d-18a5-4bbe-b4bf-860e2332b324" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.528247] env[69994]: DEBUG nova.compute.manager [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Instance network_info: |[{"id": "3e918786-01b7-4a7e-a884-720a3c170676", "address": "fa:16:3e:42:46:7f", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e918786-01", "ovs_interfaceid": "3e918786-01b7-4a7e-a884-720a3c170676", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 661.531496] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:46:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04e15990-16e1-4cb2-b0f0-06c362e68c5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e918786-01b7-4a7e-a884-720a3c170676', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 661.544020] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Creating folder: Project (21bf4c6f3b2c45218949b0e6c1eb84fd). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 661.548478] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1560cd76-2a8c-4764-af3e-4837bd2c6d9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.570619] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Created folder: Project (21bf4c6f3b2c45218949b0e6c1eb84fd) in parent group-v587342. [ 661.570687] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Creating folder: Instances. Parent ref: group-v587381. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 661.571442] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9ee7173-4df4-4b71-9276-b4301d1dd669 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.591143] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Created folder: Instances in parent group-v587381. [ 661.591143] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 661.591143] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 661.591143] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29c72a8d-e813-4c76-ac5b-519f30ca67fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.617646] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 661.617646] env[69994]: value = "task-2925047" [ 661.617646] env[69994]: _type = "Task" [ 661.617646] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.637261] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925047, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.647790] env[69994]: DEBUG nova.network.neutron [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Successfully updated port: c2407183-ab55-4108-a4c9-1fb48e727a35 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 661.689174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.689174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.689174] env[69994]: DEBUG nova.network.neutron [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 661.795603] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925042, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.005609] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.428137} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.006751] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 662.006991] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 662.007271] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 662.064128] env[69994]: INFO nova.compute.manager [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Took 26.87 seconds to build instance. [ 662.072174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Acquiring lock "443382a8-64af-4f13-b7ab-11234fb13fcf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.072430] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Lock "443382a8-64af-4f13-b7ab-11234fb13fcf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.072630] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Acquiring lock "443382a8-64af-4f13-b7ab-11234fb13fcf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.072834] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Lock "443382a8-64af-4f13-b7ab-11234fb13fcf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.073082] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Lock "443382a8-64af-4f13-b7ab-11234fb13fcf-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.075457] env[69994]: INFO nova.compute.manager [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Terminating instance [ 662.141328] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925047, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.151211] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "refresh_cache-803e9885-000f-4696-9fb9-03361ef46538" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.151453] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquired lock "refresh_cache-803e9885-000f-4696-9fb9-03361ef46538" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.151808] env[69994]: DEBUG nova.network.neutron [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 662.169708] env[69994]: DEBUG nova.compute.manager [req-65f2f08e-c677-4930-ada6-e41f2133d103 req-9115746d-b79b-4f08-879e-57296ce26b33 service nova] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Received event network-vif-deleted-bffe8ff2-1bac-4992-8b93-aef1a09a525b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 662.295584] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925042, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57976} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.295877] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 9e9973e1-feb8-4fd7-95ae-e6d824af5a64/9e9973e1-feb8-4fd7-95ae-e6d824af5a64.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 662.297033] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 662.297033] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5027c244-8a1b-4144-9820-891f735df245 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.304132] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Waiting for the task: (returnval){ [ 662.304132] env[69994]: value = "task-2925048" [ 662.304132] env[69994]: _type = "Task" [ 662.304132] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.323041] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925048, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.555376] env[69994]: DEBUG nova.network.neutron [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating instance_info_cache with network_info: [{"id": "6634b7a0-01a3-49e4-a7ac-6f8572d86925", "address": "fa:16:3e:dc:4a:90", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6634b7a0-01", "ovs_interfaceid": "6634b7a0-01a3-49e4-a7ac-6f8572d86925", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.570743] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3cb89da2-1d0f-4e5c-b9bc-95f5c8a07e3b tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Lock "75e952e7-6761-49a4-9193-175f5d30494e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.356s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.580569] env[69994]: DEBUG nova.compute.manager [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 662.580813] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 662.581739] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b154142c-e80f-4647-afe4-dedd20672dc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.601972] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 662.601972] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34529b18-a697-4890-a6bd-db9bb818f132 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.610518] env[69994]: DEBUG oslo_vmware.api [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Waiting for the task: (returnval){ [ 662.610518] env[69994]: value = "task-2925049" [ 662.610518] env[69994]: _type = "Task" [ 662.610518] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.623787] env[69994]: DEBUG oslo_vmware.api [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925049, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.638407] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925047, 'name': CreateVM_Task, 'duration_secs': 0.598017} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.638568] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 662.639408] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.639556] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.640149] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 662.640853] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13ed25e3-4897-40bf-9e35-dd9c3c1baca5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.646657] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 662.646657] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5284277e-0c48-4c67-1269-66ba9129dd11" [ 662.646657] env[69994]: _type = "Task" [ 662.646657] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.659668] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5284277e-0c48-4c67-1269-66ba9129dd11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.677441] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8aee202-d4ce-4ddc-9def-8347717dd1a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.686364] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155d3a04-7081-4a78-848d-198f2a1a296b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.718702] env[69994]: DEBUG nova.network.neutron [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.721439] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4b0933-a43d-4743-92c4-4a4a910a1c49 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.730499] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e79fd53-5358-4d26-9b43-34695f7f6251 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.745975] env[69994]: DEBUG nova.compute.provider_tree [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.819361] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107355} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.819656] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 662.820550] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39791d9-a880-411d-861e-0734e173d772 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.852996] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 9e9973e1-feb8-4fd7-95ae-e6d824af5a64/9e9973e1-feb8-4fd7-95ae-e6d824af5a64.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 662.853776] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7333890-e189-445e-bfdc-613057f636b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.876215] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Waiting for the task: (returnval){ [ 662.876215] env[69994]: value = "task-2925050" [ 662.876215] env[69994]: _type = "Task" [ 662.876215] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.885950] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925050, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.051018] env[69994]: DEBUG nova.virt.hardware [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 663.051018] env[69994]: DEBUG nova.virt.hardware [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 663.051018] env[69994]: DEBUG nova.virt.hardware [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 663.051018] env[69994]: DEBUG nova.virt.hardware [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 663.051300] env[69994]: DEBUG nova.virt.hardware [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 663.051599] env[69994]: DEBUG nova.virt.hardware [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 663.051979] env[69994]: DEBUG nova.virt.hardware [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 663.052289] env[69994]: DEBUG 
nova.virt.hardware [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 663.055017] env[69994]: DEBUG nova.virt.hardware [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 663.055017] env[69994]: DEBUG nova.virt.hardware [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 663.055017] env[69994]: DEBUG nova.virt.hardware [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 663.055017] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4729980a-2bbe-4f41-8e26-3743979b3dd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.057900] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.066979] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31bf5dfc-0278-451e-b8f6-f6096e8506a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.074223] env[69994]: DEBUG nova.network.neutron [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Updating instance_info_cache with network_info: [{"id": "c2407183-ab55-4108-a4c9-1fb48e727a35", "address": "fa:16:3e:76:96:bd", "network": {"id": "0d173c7c-24a5-47c8-b6e4-716b3226c39b", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1739066242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a9a6d7e114941d5a384d9907b491335", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2407183-ab", "ovs_interfaceid": "c2407183-ab55-4108-a4c9-1fb48e727a35", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.075493] env[69994]: DEBUG nova.compute.manager [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 663.086241] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 663.091990] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 663.094037] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 663.094037] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1d9e125-ff63-453e-b1f2-c099f3122cd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.112181] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 663.112181] env[69994]: value = "task-2925051" [ 663.112181] env[69994]: _type = "Task" [ 663.112181] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.124857] env[69994]: DEBUG oslo_vmware.api [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925049, 'name': PowerOffVM_Task, 'duration_secs': 0.250877} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.129501] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 663.129501] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 663.129501] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925051, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.129501] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6118ef18-584c-4923-a5ac-76007df386c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.161394] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5284277e-0c48-4c67-1269-66ba9129dd11, 'name': SearchDatastore_Task, 'duration_secs': 0.015846} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.161740] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.162188] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 663.162297] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.162454] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.162635] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 663.162906] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ea1dae8-2d85-4aa4-8058-d9ee3b93dcb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.174960] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 663.175060] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Folder 
[datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 663.176269] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a91b3a37-1ac1-4851-ab34-cfef0b19eaaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.184560] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 663.184560] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cb538a-428d-b7a7-baa6-47acb139c2df" [ 663.184560] env[69994]: _type = "Task" [ 663.184560] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.193098] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cb538a-428d-b7a7-baa6-47acb139c2df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.238141] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 663.238141] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 663.238141] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Deleting the datastore file [datastore1] 443382a8-64af-4f13-b7ab-11234fb13fcf {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 663.238710] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ff948e1-0036-462e-a623-afb2ab9f4539 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.246486] env[69994]: DEBUG nova.compute.manager [req-c3e9cbef-730d-4579-bdaa-7ef0885217e3 req-9f23df77-00e2-42fc-8c86-ae80e8b2e73b service nova] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Received event network-changed-3e918786-01b7-4a7e-a884-720a3c170676 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 663.246486] env[69994]: DEBUG nova.compute.manager [req-c3e9cbef-730d-4579-bdaa-7ef0885217e3 req-9f23df77-00e2-42fc-8c86-ae80e8b2e73b service nova] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Refreshing instance network info cache due to event network-changed-3e918786-01b7-4a7e-a884-720a3c170676. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 663.247066] env[69994]: DEBUG oslo_concurrency.lockutils [req-c3e9cbef-730d-4579-bdaa-7ef0885217e3 req-9f23df77-00e2-42fc-8c86-ae80e8b2e73b service nova] Acquiring lock "refresh_cache-f3ae584d-18a5-4bbe-b4bf-860e2332b324" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.247066] env[69994]: DEBUG oslo_concurrency.lockutils [req-c3e9cbef-730d-4579-bdaa-7ef0885217e3 req-9f23df77-00e2-42fc-8c86-ae80e8b2e73b service nova] Acquired lock "refresh_cache-f3ae584d-18a5-4bbe-b4bf-860e2332b324" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.247066] env[69994]: DEBUG nova.network.neutron [req-c3e9cbef-730d-4579-bdaa-7ef0885217e3 req-9f23df77-00e2-42fc-8c86-ae80e8b2e73b service nova] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Refreshing network info cache for port 3e918786-01b7-4a7e-a884-720a3c170676 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 663.251019] env[69994]: DEBUG nova.scheduler.client.report [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 663.254825] env[69994]: DEBUG oslo_vmware.api [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Waiting for the task: (returnval){ [ 663.254825] env[69994]: value = "task-2925053" [ 663.254825] env[69994]: _type = "Task" [ 663.254825] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.289274] env[69994]: DEBUG oslo_vmware.api [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925053, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.392189] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925050, 'name': ReconfigVM_Task, 'duration_secs': 0.393263} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.392545] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 9e9973e1-feb8-4fd7-95ae-e6d824af5a64/9e9973e1-feb8-4fd7-95ae-e6d824af5a64.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 663.395119] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d1e2e7e-0166-4c6b-9495-3d896b28de1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.401949] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Waiting for the task: (returnval){ [ 663.401949] env[69994]: value = "task-2925054" [ 663.401949] env[69994]: _type = "Task" [ 663.401949] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.414868] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925054, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.587278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Releasing lock "refresh_cache-803e9885-000f-4696-9fb9-03361ef46538" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.587638] env[69994]: DEBUG nova.compute.manager [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Instance network_info: |[{"id": "c2407183-ab55-4108-a4c9-1fb48e727a35", "address": "fa:16:3e:76:96:bd", "network": {"id": "0d173c7c-24a5-47c8-b6e4-716b3226c39b", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1739066242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a9a6d7e114941d5a384d9907b491335", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2407183-ab", "ovs_interfaceid": "c2407183-ab55-4108-a4c9-1fb48e727a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 663.588019] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:96:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2407183-ab55-4108-a4c9-1fb48e727a35', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 663.596597] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Creating folder: Project (0a9a6d7e114941d5a384d9907b491335). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 663.601929] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bfe4c9af-e230-49f9-a353-aa350887d89a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.631868] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925051, 'name': CreateVM_Task, 'duration_secs': 0.334622} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.632164] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Created folder: Project (0a9a6d7e114941d5a384d9907b491335) in parent group-v587342. [ 663.632331] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Creating folder: Instances. Parent ref: group-v587385. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 663.632530] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 663.632720] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f85a4475-3d18-42e1-a371-8515f1358a4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.636472] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.636923] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.637085] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.637530] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 663.637778] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eef9a169-8e1f-4e67-8eca-a225e26635c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.647437] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Waiting for the task: (returnval){ [ 663.647437] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52202a5a-eb87-5fb8-f710-d443057ba52a" [ 663.647437] env[69994]: _type = "Task" [ 663.647437] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.653114] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Created folder: Instances in parent group-v587385. [ 663.653360] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 663.654015] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 663.654271] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12ef2bfb-2074-4a42-aaba-2898744574d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.677183] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52202a5a-eb87-5fb8-f710-d443057ba52a, 'name': SearchDatastore_Task, 'duration_secs': 0.012354} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.677972] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.678286] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 663.678448] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.678584] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.680029] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 663.680029] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-736d9c22-b8d6-4eeb-8a23-d1f203c9a666 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.686019] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 663.686019] env[69994]: value = "task-2925057" [ 663.686019] env[69994]: _type = "Task" [ 663.686019] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.692370] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 663.692779] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 663.694248] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53244498-31dd-417a-b9fc-b16acd5f92a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.701108] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cb538a-428d-b7a7-baa6-47acb139c2df, 'name': SearchDatastore_Task, 'duration_secs': 0.015259} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.702452] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4e1bf10-edbd-43b2-8863-ffda23de73d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.709964] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925057, 'name': CreateVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.710478] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Waiting for the task: (returnval){ [ 663.710478] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5293a4b4-a501-3fcb-b38c-663e6f8469bc" [ 663.710478] env[69994]: _type = "Task" [ 663.710478] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.716108] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 663.716108] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5264243c-10a3-837a-fb04-2ae1a57f853f" [ 663.716108] env[69994]: _type = "Task" [ 663.716108] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.723874] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5293a4b4-a501-3fcb-b38c-663e6f8469bc, 'name': SearchDatastore_Task, 'duration_secs': 0.012544} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.725082] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f66c6871-6ba4-4c6d-ad7e-331e0cf7b537 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.731466] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5264243c-10a3-837a-fb04-2ae1a57f853f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.735173] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Waiting for the task: (returnval){ [ 663.735173] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5257bd53-83c8-95b4-f3b6-61fc9ab8d8b1" [ 663.735173] env[69994]: _type = "Task" [ 663.735173] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.744072] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5257bd53-83c8-95b4-f3b6-61fc9ab8d8b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.756436] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.592s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.757058] env[69994]: DEBUG nova.compute.manager [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 663.762014] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.205s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.763520] env[69994]: INFO nova.compute.claims [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 663.778902] env[69994]: DEBUG oslo_vmware.api [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Task: {'id': task-2925053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168273} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.779237] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 663.779425] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 663.779612] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 663.779777] env[69994]: INFO nova.compute.manager [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Took 1.20 seconds to destroy the instance on the hypervisor. [ 663.780042] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 663.780251] env[69994]: DEBUG nova.compute.manager [-] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 663.780341] env[69994]: DEBUG nova.network.neutron [-] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 663.918168] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925054, 'name': Rename_Task, 'duration_secs': 0.166504} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.918168] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 663.918168] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1d48f17-64a4-402b-a23b-e2323c579fa8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.927671] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Waiting for the task: (returnval){ [ 663.927671] env[69994]: value = "task-2925058" [ 663.927671] env[69994]: _type = "Task" [ 663.927671] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.935257] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925058, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.204775] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925057, 'name': CreateVM_Task, 'duration_secs': 0.368469} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.205032] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 664.205685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.205929] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.206288] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 664.206638] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9a93b87-73cb-4e96-9b82-24a836c9bb98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.214842] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 664.214842] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cc1d8b-4a03-f5ed-042f-29a18b282017" [ 664.214842] env[69994]: _type = "Task" [ 664.214842] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.233741] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5264243c-10a3-837a-fb04-2ae1a57f853f, 'name': SearchDatastore_Task, 'duration_secs': 0.01973} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.236781] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cc1d8b-4a03-f5ed-042f-29a18b282017, 'name': SearchDatastore_Task, 'duration_secs': 0.012481} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.236781] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.237030] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] f3ae584d-18a5-4bbe-b4bf-860e2332b324/f3ae584d-18a5-4bbe-b4bf-860e2332b324.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 664.237655] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.237655] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 664.237804] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.238112] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e5a5df3-42ba-42f4-bfe4-f83f44a8378c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.254421] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5257bd53-83c8-95b4-f3b6-61fc9ab8d8b1, 'name': SearchDatastore_Task, 'duration_secs': 0.010013} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.255972] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.257566] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3/316ab41e-d3c1-4cef-8d63-a138e21d0ea3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 664.257566] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 664.257566] env[69994]: value = "task-2925059" [ 664.257566] env[69994]: _type = "Task" [ 664.257566] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.257566] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.257566] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 664.258182] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-958d542d-310e-4bfe-85f3-9e067be00ce1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.259437] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5f4c22c-2dad-4f2b-ab22-86d9a3d5e359 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.268021] env[69994]: DEBUG nova.compute.utils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 664.274029] env[69994]: DEBUG nova.compute.manager [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 664.274320] env[69994]: DEBUG nova.network.neutron [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 664.278770] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Waiting for the task: (returnval){ [ 664.278770] env[69994]: value = "task-2925060" [ 664.278770] env[69994]: _type = "Task" [ 664.278770] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.281292] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925059, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.284159] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 664.284289] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 664.290850] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd90c36a-bbcb-4d04-821e-7409966b8344 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.301958] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925060, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.303579] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 664.303579] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52958d6e-3672-0f14-9323-b7f54c24396c" [ 664.303579] env[69994]: _type = "Task" [ 664.303579] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.315025] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52958d6e-3672-0f14-9323-b7f54c24396c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.401208] env[69994]: DEBUG nova.network.neutron [req-c3e9cbef-730d-4579-bdaa-7ef0885217e3 req-9f23df77-00e2-42fc-8c86-ae80e8b2e73b service nova] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Updated VIF entry in instance network info cache for port 3e918786-01b7-4a7e-a884-720a3c170676. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 664.401890] env[69994]: DEBUG nova.network.neutron [req-c3e9cbef-730d-4579-bdaa-7ef0885217e3 req-9f23df77-00e2-42fc-8c86-ae80e8b2e73b service nova] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Updating instance_info_cache with network_info: [{"id": "3e918786-01b7-4a7e-a884-720a3c170676", "address": "fa:16:3e:42:46:7f", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e918786-01", "ovs_interfaceid": "3e918786-01b7-4a7e-a884-720a3c170676", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.433919] env[69994]: DEBUG nova.policy [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3253706fa314bf6a8aaeb6ac4c6504f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cd7386da3414f198142cee5c6d383b0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 664.442416] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925058, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.581543] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5161c483-1817-403f-a384-bd16e50027ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.609753] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating instance '63d6a59a-d58c-4179-ad39-eb9863e6f84c' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 664.776857] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925059, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.784794] env[69994]: DEBUG nova.compute.manager [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 664.807692] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925060, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.819332] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52958d6e-3672-0f14-9323-b7f54c24396c, 'name': SearchDatastore_Task, 'duration_secs': 0.020846} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.819976] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-433441a0-d4c1-4b69-9c5e-bd58238691ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.830840] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 664.830840] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526710b7-62c6-8620-628b-26e530a4564b" [ 664.830840] env[69994]: _type = "Task" [ 664.830840] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.840537] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526710b7-62c6-8620-628b-26e530a4564b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.906486] env[69994]: DEBUG oslo_concurrency.lockutils [req-c3e9cbef-730d-4579-bdaa-7ef0885217e3 req-9f23df77-00e2-42fc-8c86-ae80e8b2e73b service nova] Releasing lock "refresh_cache-f3ae584d-18a5-4bbe-b4bf-860e2332b324" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.937242] env[69994]: DEBUG oslo_vmware.api [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925058, 'name': PowerOnVM_Task, 'duration_secs': 0.513679} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.940150] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 664.940385] env[69994]: INFO nova.compute.manager [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Took 9.76 seconds to spawn the instance on the hypervisor. 
[ 664.940567] env[69994]: DEBUG nova.compute.manager [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 664.941774] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446ebb57-fb3a-42cc-a645-762e58ff8e31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.959464] env[69994]: DEBUG nova.compute.manager [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Received event network-vif-plugged-c2407183-ab55-4108-a4c9-1fb48e727a35 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 664.959730] env[69994]: DEBUG oslo_concurrency.lockutils [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] Acquiring lock "803e9885-000f-4696-9fb9-03361ef46538-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.959874] env[69994]: DEBUG oslo_concurrency.lockutils [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] Lock "803e9885-000f-4696-9fb9-03361ef46538-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.960027] env[69994]: DEBUG oslo_concurrency.lockutils [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] Lock "803e9885-000f-4696-9fb9-03361ef46538-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.960189] env[69994]: DEBUG nova.compute.manager [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] [instance: 803e9885-000f-4696-9fb9-03361ef46538] No waiting events found dispatching network-vif-plugged-c2407183-ab55-4108-a4c9-1fb48e727a35 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 664.960346] env[69994]: WARNING nova.compute.manager [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Received unexpected event network-vif-plugged-c2407183-ab55-4108-a4c9-1fb48e727a35 for instance with vm_state building and task_state spawning. 
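The event entries above show Neutron notifying Nova that the port came up: the compute manager takes the per-instance "-events" lock, pops a matching waiter if one was registered, and otherwise logs the "Received unexpected event" warning (harmless here, since the spawn was not blocked waiting for the VIF). A simplified, hedged sketch of that pop-then-dispatch idea, not the actual InstanceEvents implementation:

import threading

_events_lock = threading.Lock()
_waiters = {}  # (instance_uuid, event_name) -> callback


def register_waiter(instance_uuid, event_name, callback):
    with _events_lock:
        _waiters[(instance_uuid, event_name)] = callback


def dispatch_external_event(instance_uuid, event_name):
    with _events_lock:
        callback = _waiters.pop((instance_uuid, event_name), None)
    if callback is None:
        # Mirrors the WARNING above: nobody was waiting for this event.
        print(f"WARNING: unexpected event {event_name} for instance {instance_uuid}")
        return
    callback()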
[ 664.960514] env[69994]: DEBUG nova.compute.manager [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Received event network-changed-c2407183-ab55-4108-a4c9-1fb48e727a35 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 664.960661] env[69994]: DEBUG nova.compute.manager [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Refreshing instance network info cache due to event network-changed-c2407183-ab55-4108-a4c9-1fb48e727a35. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 664.960834] env[69994]: DEBUG oslo_concurrency.lockutils [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] Acquiring lock "refresh_cache-803e9885-000f-4696-9fb9-03361ef46538" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.960958] env[69994]: DEBUG oslo_concurrency.lockutils [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] Acquired lock "refresh_cache-803e9885-000f-4696-9fb9-03361ef46538" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.961116] env[69994]: DEBUG nova.network.neutron [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Refreshing network info cache for port c2407183-ab55-4108-a4c9-1fb48e727a35 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 665.116771] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 665.117022] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-407a168c-7cbd-4de5-bfb4-fb9eaa734b7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.127600] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 665.127600] env[69994]: value = "task-2925061" [ 665.127600] env[69994]: _type = "Task" [ 665.127600] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.149118] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925061, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.272821] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925059, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.605938} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.272821] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] f3ae584d-18a5-4bbe-b4bf-860e2332b324/f3ae584d-18a5-4bbe-b4bf-860e2332b324.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 665.272821] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 665.272821] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6963ec9a-ae27-4422-94de-d7e27d8530f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.280307] env[69994]: DEBUG nova.network.neutron [-] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.280307] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1021f10d-c32b-4a01-9ca9-754d390481d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.304482] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 665.304482] env[69994]: value = "task-2925062" [ 665.304482] env[69994]: _type = "Task" [ 665.304482] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.316466] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d32ad59-e28e-4d56-bf78-34f9db9e3974 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.324019] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925060, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.741778} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.325040] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3/316ab41e-d3c1-4cef-8d63-a138e21d0ea3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 665.325353] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 665.325919] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6ed26c4-119f-42ae-a1d1-1d8654aa12a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.333056] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925062, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.364900] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91de8d2-3d47-4e88-8d02-f11c0380496f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.370788] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Waiting for the task: (returnval){ [ 665.370788] env[69994]: value = "task-2925063" [ 665.370788] env[69994]: _type = "Task" [ 665.370788] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.383161] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834ccb35-8101-4ab1-8a4e-3527336b5557 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.389302] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526710b7-62c6-8620-628b-26e530a4564b, 'name': SearchDatastore_Task, 'duration_secs': 0.056793} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.389302] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925063, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.389302] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.392167] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 803e9885-000f-4696-9fb9-03361ef46538/803e9885-000f-4696-9fb9-03361ef46538.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 665.392167] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-246e214a-d8d5-45d8-8c50-ff48c6b9d4b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.402680] env[69994]: DEBUG nova.compute.provider_tree [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.412972] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 665.412972] env[69994]: value = "task-2925064" [ 665.412972] env[69994]: _type = "Task" [ 665.412972] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.430528] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925064, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.430632] env[69994]: DEBUG nova.network.neutron [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Successfully created port: 1acb2297-91d5-4675-bbec-1c950d6cd544 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.472023] env[69994]: INFO nova.compute.manager [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Took 29.12 seconds to build instance. 
[ 665.652282] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925061, 'name': PowerOffVM_Task, 'duration_secs': 0.361816} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.655770] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 665.656260] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating instance '63d6a59a-d58c-4179-ad39-eb9863e6f84c' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 665.794886] env[69994]: INFO nova.compute.manager [-] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Took 2.01 seconds to deallocate network for instance. [ 665.809771] env[69994]: DEBUG nova.compute.manager [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 665.827170] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925062, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083245} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.827170] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 665.827580] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e1cce5-3c5c-4bb7-a65c-c5ec6b5c7267 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.856805] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] f3ae584d-18a5-4bbe-b4bf-860e2332b324/f3ae584d-18a5-4bbe-b4bf-860e2332b324.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 665.862303] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-328e0640-47fd-4637-a312-215dafd6c1cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.891117] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925063, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149664} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.892068] env[69994]: DEBUG nova.network.neutron [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Updated VIF entry in instance network info cache for port c2407183-ab55-4108-a4c9-1fb48e727a35. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 665.892413] env[69994]: DEBUG nova.network.neutron [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Updating instance_info_cache with network_info: [{"id": "c2407183-ab55-4108-a4c9-1fb48e727a35", "address": "fa:16:3e:76:96:bd", "network": {"id": "0d173c7c-24a5-47c8-b6e4-716b3226c39b", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1739066242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a9a6d7e114941d5a384d9907b491335", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2407183-ab", "ovs_interfaceid": "c2407183-ab55-4108-a4c9-1fb48e727a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.907090] env[69994]: DEBUG nova.virt.hardware [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:30:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1321307962',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1912713107',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 665.907090] env[69994]: DEBUG nova.virt.hardware [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 665.907090] env[69994]: DEBUG nova.virt.hardware [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 665.907404] env[69994]: DEBUG nova.virt.hardware [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e 
tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 665.907404] env[69994]: DEBUG nova.virt.hardware [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 665.907404] env[69994]: DEBUG nova.virt.hardware [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 665.907404] env[69994]: DEBUG nova.virt.hardware [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 665.907404] env[69994]: DEBUG nova.virt.hardware [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 665.907646] env[69994]: DEBUG nova.virt.hardware [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 665.907646] env[69994]: DEBUG nova.virt.hardware [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 665.907646] env[69994]: DEBUG nova.virt.hardware [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 665.907646] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 665.907646] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 665.907646] env[69994]: value = "task-2925065" [ 665.907646] env[69994]: _type = "Task" [ 665.907646] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.909342] env[69994]: DEBUG nova.scheduler.client.report [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 665.914046] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4818d2f1-12e3-4a33-b1f1-ba2822473543 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.916863] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec87b5a-9f62-4473-9905-9cf2ebafba78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.949065] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3/316ab41e-d3c1-4cef-8d63-a138e21d0ea3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 665.956367] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2191b2e-5303-4c60-8186-2d03188e0c8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.960841] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8eef2e8b-26ca-41d0-a9bd-eef890f0596c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.974932] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925064, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.975358] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925065, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.975930] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba9e26bb-97ef-4147-aa6b-05ae5654d1d7 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Lock "9e9973e1-feb8-4fd7-95ae-e6d824af5a64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.059s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.989318] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Waiting for the task: (returnval){ [ 665.989318] env[69994]: value = "task-2925066" [ 665.989318] env[69994]: _type = "Task" [ 665.989318] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.999961] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925066, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.166026] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 666.166026] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 666.166026] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 666.166026] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 666.166471] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 666.167809] env[69994]: DEBUG 
nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 666.167809] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 666.167809] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 666.167809] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 666.167809] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 666.168191] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 666.175593] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ad800f1-dd69-4e81-85ba-ac8e301c39d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.194571] env[69994]: DEBUG nova.compute.manager [req-a1266ada-912d-4ea8-9181-a60803111ce0 req-a33d871d-cea4-4b20-9039-d5d9310a5ffa service nova] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Received event network-vif-deleted-70b00ea1-8747-4c12-8a6c-5abf6c3669f2 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 666.204769] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 666.204769] env[69994]: value = "task-2925067" [ 666.204769] env[69994]: _type = "Task" [ 666.204769] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.225185] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925067, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.310408] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.399515] env[69994]: DEBUG oslo_concurrency.lockutils [req-d000bfaf-8fe2-4d76-941b-880964730c4d req-fe355445-d23e-4b24-aa88-88614d57387a service nova] Releasing lock "refresh_cache-803e9885-000f-4696-9fb9-03361ef46538" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.417601] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.652s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.417601] env[69994]: DEBUG nova.compute.manager [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 666.423167] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.863s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.427880] env[69994]: DEBUG nova.objects.instance [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Lazy-loading 'resources' on Instance uuid 1d5b8fb7-eeb0-49da-acdf-53b7741e863e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 666.455526] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925065, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.455927] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925064, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.478544] env[69994]: DEBUG nova.compute.manager [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 666.505629] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925066, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.718113] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925067, 'name': ReconfigVM_Task, 'duration_secs': 0.339648} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.718830] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating instance '63d6a59a-d58c-4179-ad39-eb9863e6f84c' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 666.929985] env[69994]: DEBUG nova.compute.utils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 666.948274] env[69994]: DEBUG nova.compute.manager [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 666.948605] env[69994]: DEBUG nova.network.neutron [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 666.958697] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925065, 'name': ReconfigVM_Task, 'duration_secs': 0.760327} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.961746] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Reconfigured VM instance instance-0000000e to attach disk [datastore1] f3ae584d-18a5-4bbe-b4bf-860e2332b324/f3ae584d-18a5-4bbe-b4bf-860e2332b324.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 666.962438] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925064, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.118218} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.962966] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dccf804e-4a7a-44de-b68f-f86f81f42c85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.965117] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 803e9885-000f-4696-9fb9-03361ef46538/803e9885-000f-4696-9fb9-03361ef46538.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 666.965117] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 666.965410] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffef155d-ac57-4767-9d2e-dda246423dee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.981588] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 666.981588] env[69994]: value = "task-2925068" [ 666.981588] env[69994]: _type = "Task" [ 666.981588] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.983238] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 666.983238] env[69994]: value = "task-2925069" [ 666.983238] env[69994]: _type = "Task" [ 666.983238] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.008158] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925068, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.013402] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925069, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.014590] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.019640] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925066, 'name': ReconfigVM_Task, 'duration_secs': 0.594138} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.022542] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3/316ab41e-d3c1-4cef-8d63-a138e21d0ea3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 667.023639] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33f37cad-470f-4f24-bf42-5e382eb4ca94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.031925] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Waiting for the task: (returnval){ [ 667.031925] env[69994]: value = "task-2925070" [ 667.031925] env[69994]: _type = "Task" [ 667.031925] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.055284] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925070, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.071582] env[69994]: DEBUG nova.policy [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa99d0f621904bb5b50eb7ce5cb04381', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '814a1036385043699ac1895c67d74188', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 667.233826] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 667.233826] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 667.234144] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 667.234212] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 667.234339] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 667.234546] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 667.234766] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 667.234950] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 667.235146] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 667.235313] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 667.235609] env[69994]: DEBUG nova.virt.hardware [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 667.240988] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Reconfiguring VM instance instance-00000005 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 667.243945] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7bf7bde-c35e-47ab-8eb1-8689d00fad06 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.266509] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 667.266509] env[69994]: value = "task-2925071" [ 667.266509] env[69994]: _type = "Task" [ 667.266509] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.276634] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925071, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.437907] env[69994]: DEBUG nova.compute.manager [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 667.498795] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925068, 'name': Rename_Task, 'duration_secs': 0.192074} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.500368] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 667.500368] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe97fb2f-ce2c-40c2-87f2-26f4900d073e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.507311] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a154643-a1fe-4af0-869e-aa3abc8137ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.515019] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925069, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120647} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.515425] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 667.515425] env[69994]: value = "task-2925072" [ 667.515425] env[69994]: _type = "Task" [ 667.515425] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.516203] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 667.517535] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41012aa8-08f5-45ba-9bd1-47ffb2cd4a0d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.528026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798cbf3b-2281-4c54-b879-3990fac12e50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.554719] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 803e9885-000f-4696-9fb9-03361ef46538/803e9885-000f-4696-9fb9-03361ef46538.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 667.555526] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925072, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.559436] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28c2234a-72c2-4d39-896b-f81f0b9551e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.603829] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbc420b-9c19-46a5-9488-0b479e8c37ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.609801] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925070, 'name': Rename_Task, 'duration_secs': 0.156347} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.611526] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 667.611948] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 667.611948] env[69994]: value = "task-2925073" [ 667.611948] env[69994]: _type = "Task" [ 667.611948] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.612339] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7086ef9-91ab-4d47-9990-d8343b7d9bd1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.625436] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4886e668-de34-45b8-a729-7cd261a1f173 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.630017] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925073, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.631640] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Waiting for the task: (returnval){ [ 667.631640] env[69994]: value = "task-2925074" [ 667.631640] env[69994]: _type = "Task" [ 667.631640] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.650381] env[69994]: DEBUG nova.compute.provider_tree [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.655277] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925074, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.777914] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925071, 'name': ReconfigVM_Task, 'duration_secs': 0.272155} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.778926] env[69994]: DEBUG nova.network.neutron [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Successfully updated port: 1acb2297-91d5-4675-bbec-1c950d6cd544 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 667.779881] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Reconfigured VM instance instance-00000005 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 667.781862] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb67ea80-983d-4299-8fc8-4388b126633c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.809240] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 63d6a59a-d58c-4179-ad39-eb9863e6f84c/63d6a59a-d58c-4179-ad39-eb9863e6f84c.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 667.809923] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5d98125-ff01-4fee-8d39-8010e4495305 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.831582] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 667.831582] env[69994]: value = "task-2925075" [ 667.831582] env[69994]: _type = "Task" [ 667.831582] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.842384] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925075, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.032963] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925072, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.044317] env[69994]: DEBUG nova.network.neutron [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Successfully created port: c26583c0-ad13-4a74-adc9-deb6e535a3f0 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 668.125633] env[69994]: DEBUG nova.compute.manager [req-7c624f2d-3ce3-4591-b30e-631c9e5f1a06 req-8cd73aa9-7608-4650-9de9-54124e3fbdd2 service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Received event network-vif-plugged-1acb2297-91d5-4675-bbec-1c950d6cd544 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 668.126999] env[69994]: DEBUG oslo_concurrency.lockutils [req-7c624f2d-3ce3-4591-b30e-631c9e5f1a06 req-8cd73aa9-7608-4650-9de9-54124e3fbdd2 service nova] Acquiring lock "8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.127530] env[69994]: DEBUG oslo_concurrency.lockutils [req-7c624f2d-3ce3-4591-b30e-631c9e5f1a06 req-8cd73aa9-7608-4650-9de9-54124e3fbdd2 service nova] Lock "8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.127780] env[69994]: DEBUG oslo_concurrency.lockutils [req-7c624f2d-3ce3-4591-b30e-631c9e5f1a06 req-8cd73aa9-7608-4650-9de9-54124e3fbdd2 service nova] Lock "8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.128107] env[69994]: DEBUG nova.compute.manager [req-7c624f2d-3ce3-4591-b30e-631c9e5f1a06 req-8cd73aa9-7608-4650-9de9-54124e3fbdd2 service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] No waiting events found dispatching network-vif-plugged-1acb2297-91d5-4675-bbec-1c950d6cd544 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 668.129524] env[69994]: WARNING nova.compute.manager [req-7c624f2d-3ce3-4591-b30e-631c9e5f1a06 req-8cd73aa9-7608-4650-9de9-54124e3fbdd2 service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Received unexpected event network-vif-plugged-1acb2297-91d5-4675-bbec-1c950d6cd544 for instance with vm_state building and task_state spawning. [ 668.133956] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925073, 'name': ReconfigVM_Task, 'duration_secs': 0.441357} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.138468] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 803e9885-000f-4696-9fb9-03361ef46538/803e9885-000f-4696-9fb9-03361ef46538.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 668.140047] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a05e465a-b28a-4f47-a1af-965086e03b14 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.152253] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925074, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.154565] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 668.154565] env[69994]: value = "task-2925076" [ 668.154565] env[69994]: _type = "Task" [ 668.154565] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.159898] env[69994]: DEBUG nova.scheduler.client.report [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 668.169827] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925076, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.281962] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "refresh_cache-8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.282215] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquired lock "refresh_cache-8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.282450] env[69994]: DEBUG nova.network.neutron [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 668.354592] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.455417] env[69994]: DEBUG nova.compute.manager [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 668.483623] env[69994]: DEBUG nova.virt.hardware [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 668.483623] env[69994]: DEBUG nova.virt.hardware [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 668.487095] env[69994]: DEBUG nova.virt.hardware [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 668.487095] env[69994]: DEBUG nova.virt.hardware [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 668.487095] env[69994]: DEBUG nova.virt.hardware [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 668.487095] env[69994]: DEBUG nova.virt.hardware [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 668.487095] env[69994]: DEBUG nova.virt.hardware [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 668.487285] env[69994]: DEBUG nova.virt.hardware [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 668.487285] env[69994]: DEBUG nova.virt.hardware [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 668.487285] env[69994]: DEBUG nova.virt.hardware [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 668.487285] env[69994]: DEBUG nova.virt.hardware [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 668.487285] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d11953-8868-46f7-8f26-0f9dc1d059a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.497518] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00363434-b00d-4227-a4f0-b2ee770a182b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.529191] env[69994]: DEBUG oslo_vmware.api [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925072, 'name': PowerOnVM_Task, 'duration_secs': 0.949605} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.532163] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 668.532163] env[69994]: INFO nova.compute.manager [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Took 10.65 seconds to spawn the instance on the hypervisor. 
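The nova.virt.hardware records above trace CPU-topology selection for the 1-vCPU m1.nano flavor: with flavor and image limits of 0:0:0 the default ceilings of 65536 sockets/cores/threads apply, the only factorisation of one vCPU is 1x1x1, and that single topology is what ends up in the "possible" and "sorted desired" lists. The Python sketch below illustrates only that enumeration step; it is not the code in nova/virt/hardware.py, and the helper name possible_topologies plus the namedtuple stand-in for VirtCPUTopology are assumptions made for the example.

```python
from collections import namedtuple

# Illustrative stand-in for nova.objects.VirtCPUTopology; not the real class.
VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every sockets*cores*threads split that exactly covers ``vcpus``.

    Mirrors the idea behind the "Build topologies for 1 vcpu(s) 1:1:1" and
    "Got 1 possible topologies" lines: each dimension is bounded by its limit
    (defaulting to 65536) and by the vCPU count itself, and only combinations
    whose product equals the vCPU count qualify.
    """
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        for cores in range(1, min(max_cores, vcpus) + 1):
            for threads in range(1, min(max_threads, vcpus) + 1):
                if sockets * cores * threads == vcpus:
                    yield VirtCPUTopology(sockets, cores, threads)

# For a 1-vCPU flavor the only candidate is 1x1x1, matching the log.
print(list(possible_topologies(1)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```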
[ 668.532163] env[69994]: DEBUG nova.compute.manager [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 668.532163] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b08b07-c544-41f5-a08e-cd708e379322 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.553242] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquiring lock "1693ccdf-ea72-45d5-8b34-e2b0e155e528" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.553949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Lock "1693ccdf-ea72-45d5-8b34-e2b0e155e528" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.647102] env[69994]: DEBUG oslo_vmware.api [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Task: {'id': task-2925074, 'name': PowerOnVM_Task, 'duration_secs': 0.56596} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.647739] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 668.648147] env[69994]: DEBUG nova.compute.manager [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 668.649338] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5e471f-f9f6-4d8f-b388-68fa4cfdad09 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.666398] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.243s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.668918] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.783s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.669172] env[69994]: DEBUG nova.objects.instance [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Lazy-loading 'resources' on Instance uuid dc548f2f-e6d6-4273-8c24-b4f52842e0d2 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 668.677138] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925076, 'name': Rename_Task, 'duration_secs': 0.231209} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.677911] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 668.678393] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5cc9825e-97eb-4106-a07a-bab14a12b6c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.687828] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 668.687828] env[69994]: value = "task-2925077" [ 668.687828] env[69994]: _type = "Task" [ 668.687828] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.699955] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925077, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.702554] env[69994]: INFO nova.scheduler.client.report [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Deleted allocations for instance 1d5b8fb7-eeb0-49da-acdf-53b7741e863e [ 668.852498] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925075, 'name': ReconfigVM_Task, 'duration_secs': 0.657423} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.853513] env[69994]: DEBUG nova.network.neutron [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 668.856131] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 63d6a59a-d58c-4179-ad39-eb9863e6f84c/63d6a59a-d58c-4179-ad39-eb9863e6f84c.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 668.857024] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating instance '63d6a59a-d58c-4179-ad39-eb9863e6f84c' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 669.015552] env[69994]: DEBUG nova.compute.manager [req-b447913d-7ab0-4605-8d4c-01b5028fb9fb req-20ac15fa-e685-4ec5-bbdd-c30d20da8e23 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Received event network-changed-d5b5dcfa-33de-47f7-8356-2384f6ed2083 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 669.015738] env[69994]: DEBUG nova.compute.manager [req-b447913d-7ab0-4605-8d4c-01b5028fb9fb req-20ac15fa-e685-4ec5-bbdd-c30d20da8e23 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Refreshing instance network info cache due to event network-changed-d5b5dcfa-33de-47f7-8356-2384f6ed2083. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 669.015959] env[69994]: DEBUG oslo_concurrency.lockutils [req-b447913d-7ab0-4605-8d4c-01b5028fb9fb req-20ac15fa-e685-4ec5-bbdd-c30d20da8e23 service nova] Acquiring lock "refresh_cache-75e952e7-6761-49a4-9193-175f5d30494e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.019020] env[69994]: DEBUG oslo_concurrency.lockutils [req-b447913d-7ab0-4605-8d4c-01b5028fb9fb req-20ac15fa-e685-4ec5-bbdd-c30d20da8e23 service nova] Acquired lock "refresh_cache-75e952e7-6761-49a4-9193-175f5d30494e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.019020] env[69994]: DEBUG nova.network.neutron [req-b447913d-7ab0-4605-8d4c-01b5028fb9fb req-20ac15fa-e685-4ec5-bbdd-c30d20da8e23 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Refreshing network info cache for port d5b5dcfa-33de-47f7-8356-2384f6ed2083 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 669.058821] env[69994]: INFO nova.compute.manager [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Took 31.65 seconds to build instance. 
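The nova.scheduler.client.report entries above repeat the full inventory for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be. A convenient way to read those dicts is that placement generally treats the usable capacity of a resource class as (total - reserved) * allocation_ratio, while max_unit caps what any single allocation may request; for the logged values that works out to 192 schedulable VCPUs, 196078 MB of RAM and 400 GB of disk. The snippet below simply re-derives those figures from the logged dict; it is an illustrative calculation, not Nova or placement code.

```python
# Inventory exactly as logged for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 158,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Effective capacity as placement reasons about it:
    # (total - reserved) scaled by the allocation ratio; max_unit bounds
    # the largest amount one allocation (one instance) can claim at once.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:g}, largest single allocation={inv['max_unit']}")

# VCPU: capacity=192, largest single allocation=16
# MEMORY_MB: capacity=196078, largest single allocation=65530
# DISK_GB: capacity=400, largest single allocation=158
```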
[ 669.120310] env[69994]: DEBUG nova.network.neutron [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Updating instance_info_cache with network_info: [{"id": "1acb2297-91d5-4675-bbec-1c950d6cd544", "address": "fa:16:3e:1e:06:87", "network": {"id": "596cb0a2-fc6e-400a-89ef-dcae93a7ee7b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1499088312-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd7386da3414f198142cee5c6d383b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f44b2fa3-6730-4b87-8839-947eff21213f", "external-id": "nsx-vlan-transportzone-984", "segmentation_id": 984, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1acb2297-91", "ovs_interfaceid": "1acb2297-91d5-4675-bbec-1c950d6cd544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.176949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.202114] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925077, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.209718] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98066a85-e655-49be-adbb-e66a13d11bf1 tempest-DeleteServersAdminTestJSON-692851787 tempest-DeleteServersAdminTestJSON-692851787-project-admin] Lock "1d5b8fb7-eeb0-49da-acdf-53b7741e863e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.736s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.365986] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0cd36fc-c4c1-4ea8-98cc-c05b19dd9318 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.399032] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8cc8abb-8e29-40c6-b01a-ce9050348854 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.423200] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating instance '63d6a59a-d58c-4179-ad39-eb9863e6f84c' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 669.561952] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5712a512-6577-48b4-957a-56ac89ec45f0 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.378s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.611645] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.611893] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 669.625766] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Releasing lock "refresh_cache-8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.625766] env[69994]: DEBUG nova.compute.manager [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 
tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Instance network_info: |[{"id": "1acb2297-91d5-4675-bbec-1c950d6cd544", "address": "fa:16:3e:1e:06:87", "network": {"id": "596cb0a2-fc6e-400a-89ef-dcae93a7ee7b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1499088312-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd7386da3414f198142cee5c6d383b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f44b2fa3-6730-4b87-8839-947eff21213f", "external-id": "nsx-vlan-transportzone-984", "segmentation_id": 984, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1acb2297-91", "ovs_interfaceid": "1acb2297-91d5-4675-bbec-1c950d6cd544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 669.625942] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:06:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f44b2fa3-6730-4b87-8839-947eff21213f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1acb2297-91d5-4675-bbec-1c950d6cd544', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 669.634732] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Creating folder: Project (3cd7386da3414f198142cee5c6d383b0). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 669.635402] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3668844c-8e08-46e6-8e6e-f22544ca59b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.658373] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Created folder: Project (3cd7386da3414f198142cee5c6d383b0) in parent group-v587342. [ 669.658373] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Creating folder: Instances. Parent ref: group-v587388. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 669.660842] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08a33fef-b82c-4249-a6e5-1b7c9e224fca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.678064] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Created folder: Instances in parent group-v587388. [ 669.678186] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 669.678376] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 669.678595] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-944b668f-8f66-48c8-97db-c191899a6d30 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.709034] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925077, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.709034] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 669.709034] env[69994]: value = "task-2925080" [ 669.709034] env[69994]: _type = "Task" [ 669.709034] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.719162] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925080, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.790565] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cc640a-73d1-47b1-b3af-ac9cb3c2d091 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.803094] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c264e83-081d-4ca1-bd9a-2205d111aa05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.840024] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d0e1b5-b403-4b81-92c1-c158e5b2a0e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.846525] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49786d7-0d39-44d2-90a6-5ed0481cdaca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.861960] env[69994]: DEBUG nova.compute.provider_tree [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.014859] env[69994]: DEBUG nova.network.neutron [req-b447913d-7ab0-4605-8d4c-01b5028fb9fb req-20ac15fa-e685-4ec5-bbdd-c30d20da8e23 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Updated VIF entry in instance network info cache for port d5b5dcfa-33de-47f7-8356-2384f6ed2083. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 670.015388] env[69994]: DEBUG nova.network.neutron [req-b447913d-7ab0-4605-8d4c-01b5028fb9fb req-20ac15fa-e685-4ec5-bbdd-c30d20da8e23 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Updating instance_info_cache with network_info: [{"id": "d5b5dcfa-33de-47f7-8356-2384f6ed2083", "address": "fa:16:3e:fb:05:0b", "network": {"id": "1b416d5a-e592-4170-a80b-d0406b3d7cbe", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-575332336-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e32425291ea4111ad7aae069c945b1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5b5dcfa-33", "ovs_interfaceid": "d5b5dcfa-33de-47f7-8356-2384f6ed2083", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.027941] env[69994]: DEBUG nova.network.neutron [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Port 6634b7a0-01a3-49e4-a7ac-6f8572d86925 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 670.066934] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 670.126560] env[69994]: DEBUG nova.network.neutron [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Successfully updated port: c26583c0-ad13-4a74-adc9-deb6e535a3f0 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 670.208643] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925077, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.218941] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925080, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.368345] env[69994]: DEBUG nova.scheduler.client.report [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 670.518575] env[69994]: DEBUG oslo_concurrency.lockutils [req-b447913d-7ab0-4605-8d4c-01b5028fb9fb req-20ac15fa-e685-4ec5-bbdd-c30d20da8e23 service nova] Releasing lock "refresh_cache-75e952e7-6761-49a4-9193-175f5d30494e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.530019] env[69994]: DEBUG nova.compute.manager [req-842bd2cf-7c97-41bf-86d3-fb13f55ac8a5 req-be823952-825f-4ba9-bc20-fa626cb5046b service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Received event network-changed-1acb2297-91d5-4675-bbec-1c950d6cd544 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 670.530242] env[69994]: DEBUG nova.compute.manager [req-842bd2cf-7c97-41bf-86d3-fb13f55ac8a5 req-be823952-825f-4ba9-bc20-fa626cb5046b service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Refreshing instance network info cache due to event network-changed-1acb2297-91d5-4675-bbec-1c950d6cd544. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 670.530455] env[69994]: DEBUG oslo_concurrency.lockutils [req-842bd2cf-7c97-41bf-86d3-fb13f55ac8a5 req-be823952-825f-4ba9-bc20-fa626cb5046b service nova] Acquiring lock "refresh_cache-8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.530595] env[69994]: DEBUG oslo_concurrency.lockutils [req-842bd2cf-7c97-41bf-86d3-fb13f55ac8a5 req-be823952-825f-4ba9-bc20-fa626cb5046b service nova] Acquired lock "refresh_cache-8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.530756] env[69994]: DEBUG nova.network.neutron [req-842bd2cf-7c97-41bf-86d3-fb13f55ac8a5 req-be823952-825f-4ba9-bc20-fa626cb5046b service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Refreshing network info cache for port 1acb2297-91d5-4675-bbec-1c950d6cd544 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 670.594016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.629768] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Acquiring lock "refresh_cache-9717f586-cedc-4f21-9ea6-7bf6e2991327" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.629923] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Acquired lock "refresh_cache-9717f586-cedc-4f21-9ea6-7bf6e2991327" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.631114] env[69994]: DEBUG nova.network.neutron [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 670.709826] env[69994]: DEBUG oslo_vmware.api [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925077, 'name': PowerOnVM_Task, 'duration_secs': 1.929875} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.714949] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 670.715234] env[69994]: INFO nova.compute.manager [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Took 10.15 seconds to spawn the instance on the hypervisor. [ 670.715435] env[69994]: DEBUG nova.compute.manager [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 670.718608] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8fb471-91d8-4255-8263-077eb8dfeb9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.733369] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925080, 'name': CreateVM_Task, 'duration_secs': 0.631072} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.733888] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 670.734841] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.735075] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.735475] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 670.735776] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d48c8c92-a27e-4576-9b78-493ae9878d0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.743420] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e 
tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 670.743420] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528532e1-cce8-7817-83ad-0299408606af" [ 670.743420] env[69994]: _type = "Task" [ 670.743420] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.755405] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528532e1-cce8-7817-83ad-0299408606af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.825101] env[69994]: DEBUG oslo_concurrency.lockutils [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquiring lock "316ab41e-d3c1-4cef-8d63-a138e21d0ea3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.825285] env[69994]: DEBUG oslo_concurrency.lockutils [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Lock "316ab41e-d3c1-4cef-8d63-a138e21d0ea3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.825802] env[69994]: DEBUG oslo_concurrency.lockutils [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquiring lock "316ab41e-d3c1-4cef-8d63-a138e21d0ea3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.825802] env[69994]: DEBUG oslo_concurrency.lockutils [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Lock "316ab41e-d3c1-4cef-8d63-a138e21d0ea3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.825802] env[69994]: DEBUG oslo_concurrency.lockutils [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Lock "316ab41e-d3c1-4cef-8d63-a138e21d0ea3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.827716] env[69994]: INFO nova.compute.manager [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Terminating instance [ 670.874909] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.206s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.879292] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.713s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.879641] env[69994]: DEBUG nova.objects.instance [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Lazy-loading 'resources' on Instance uuid 1232f601-3339-4fc2-92b2-aa550af90b01 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 670.896194] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquiring lock "2ee43622-74f3-4bf6-88e3-cba4ff7ce33d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.896460] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "2ee43622-74f3-4bf6-88e3-cba4ff7ce33d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.901489] env[69994]: INFO nova.scheduler.client.report [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Deleted allocations for instance dc548f2f-e6d6-4273-8c24-b4f52842e0d2 [ 671.057493] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.058158] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.058343] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock 
"63d6a59a-d58c-4179-ad39-eb9863e6f84c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.183526] env[69994]: DEBUG nova.network.neutron [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.253963] env[69994]: INFO nova.compute.manager [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Took 31.68 seconds to build instance. [ 671.262837] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528532e1-cce8-7817-83ad-0299408606af, 'name': SearchDatastore_Task, 'duration_secs': 0.019885} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.262837] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.262942] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 671.264669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.264669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.264669] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 671.264669] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ad80920-755e-4ce9-afcb-65c8ae91b907 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.277358] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 671.277568] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 671.278386] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e557cb50-2677-47a3-9097-1eb5c3238835 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.288059] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 671.288059] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520e80be-7fa9-4e8a-34d5-7b5973291ca4" [ 671.288059] env[69994]: _type = "Task" [ 671.288059] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.303711] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520e80be-7fa9-4e8a-34d5-7b5973291ca4, 'name': SearchDatastore_Task, 'duration_secs': 0.011746} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.304669] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8197605-2807-46a9-89ba-d1e89c718180 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.311715] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 671.311715] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e8d482-fdc7-2df5-f383-8742b746e501" [ 671.311715] env[69994]: _type = "Task" [ 671.311715] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.324608] env[69994]: DEBUG oslo_concurrency.lockutils [None req-172198ec-7c14-496f-8a8b-24efe16aa4d5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.324608] env[69994]: DEBUG oslo_concurrency.lockutils [None req-172198ec-7c14-496f-8a8b-24efe16aa4d5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.324907] env[69994]: DEBUG nova.compute.manager [None req-172198ec-7c14-496f-8a8b-24efe16aa4d5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 671.325140] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e8d482-fdc7-2df5-f383-8742b746e501, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.326019] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f44e6c-dd71-42e8-9c4a-f21d9dd15d00 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.331606] env[69994]: DEBUG oslo_concurrency.lockutils [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquiring lock "refresh_cache-316ab41e-d3c1-4cef-8d63-a138e21d0ea3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.331804] env[69994]: DEBUG oslo_concurrency.lockutils [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquired lock "refresh_cache-316ab41e-d3c1-4cef-8d63-a138e21d0ea3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.331972] env[69994]: DEBUG nova.network.neutron [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 671.335233] env[69994]: DEBUG nova.compute.manager [None req-172198ec-7c14-496f-8a8b-24efe16aa4d5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance 
/opt/stack/nova/nova/compute/manager.py:3404}} [ 671.336121] env[69994]: DEBUG nova.objects.instance [None req-172198ec-7c14-496f-8a8b-24efe16aa4d5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lazy-loading 'flavor' on Instance uuid f3ae584d-18a5-4bbe-b4bf-860e2332b324 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 671.409625] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61c1dc97-010d-4a62-a5e7-62a1eaa6f072 tempest-ServerDiagnosticsTest-1840360730 tempest-ServerDiagnosticsTest-1840360730-project-member] Lock "dc548f2f-e6d6-4273-8c24-b4f52842e0d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.005s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.504898] env[69994]: DEBUG nova.network.neutron [req-842bd2cf-7c97-41bf-86d3-fb13f55ac8a5 req-be823952-825f-4ba9-bc20-fa626cb5046b service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Updated VIF entry in instance network info cache for port 1acb2297-91d5-4675-bbec-1c950d6cd544. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 671.505068] env[69994]: DEBUG nova.network.neutron [req-842bd2cf-7c97-41bf-86d3-fb13f55ac8a5 req-be823952-825f-4ba9-bc20-fa626cb5046b service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Updating instance_info_cache with network_info: [{"id": "1acb2297-91d5-4675-bbec-1c950d6cd544", "address": "fa:16:3e:1e:06:87", "network": {"id": "596cb0a2-fc6e-400a-89ef-dcae93a7ee7b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1499088312-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd7386da3414f198142cee5c6d383b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f44b2fa3-6730-4b87-8839-947eff21213f", "external-id": "nsx-vlan-transportzone-984", "segmentation_id": 984, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1acb2297-91", "ovs_interfaceid": "1acb2297-91d5-4675-bbec-1c950d6cd544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.517453] env[69994]: DEBUG nova.compute.manager [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Received event network-changed-68b29b35-015e-4545-af50-70655d1914db {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 671.517695] env[69994]: DEBUG nova.compute.manager [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Refreshing instance network info cache due to event network-changed-68b29b35-015e-4545-af50-70655d1914db. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 671.517839] env[69994]: DEBUG oslo_concurrency.lockutils [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] Acquiring lock "refresh_cache-9e9973e1-feb8-4fd7-95ae-e6d824af5a64" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.518059] env[69994]: DEBUG oslo_concurrency.lockutils [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] Acquired lock "refresh_cache-9e9973e1-feb8-4fd7-95ae-e6d824af5a64" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.518365] env[69994]: DEBUG nova.network.neutron [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Refreshing network info cache for port 68b29b35-015e-4545-af50-70655d1914db {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 671.712204] env[69994]: DEBUG nova.network.neutron [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Updating instance_info_cache with network_info: [{"id": "c26583c0-ad13-4a74-adc9-deb6e535a3f0", "address": "fa:16:3e:18:8c:fa", "network": {"id": "2ae2b24b-719f-46ca-9855-58cc936f6147", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-94358001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "814a1036385043699ac1895c67d74188", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc26583c0-ad", "ovs_interfaceid": "c26583c0-ad13-4a74-adc9-deb6e535a3f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.756716] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b93b4d41-7de7-4731-ac33-8e7b23ac5b1b tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "803e9885-000f-4696-9fb9-03361ef46538" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.117s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.828669] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e8d482-fdc7-2df5-f383-8742b746e501, 'name': SearchDatastore_Task, 'duration_secs': 
0.012056} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.829133] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.829528] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6/8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 671.830729] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a0152d2-9e03-4b8f-97fd-cdfb81f45273 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.853915] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 671.853915] env[69994]: value = "task-2925081" [ 671.853915] env[69994]: _type = "Task" [ 671.853915] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.864911] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925081, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.893288] env[69994]: DEBUG nova.network.neutron [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.913967] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327c8b86-50a6-4a4a-a259-19ba3db0689b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.927460] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d65d74-69a3-4fce-bdcd-746e0e45c9dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.968558] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16a25fb-2f25-43ef-b846-d1699949352d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.977517] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26efb2a9-7839-4033-ad97-c8c51834983f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.994592] env[69994]: DEBUG nova.compute.provider_tree [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.007525] env[69994]: DEBUG oslo_concurrency.lockutils [req-842bd2cf-7c97-41bf-86d3-fb13f55ac8a5 req-be823952-825f-4ba9-bc20-fa626cb5046b service nova] Releasing lock "refresh_cache-8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.083944] env[69994]: DEBUG nova.network.neutron [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.188841] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.188841] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.188841] env[69994]: DEBUG nova.network.neutron [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 672.217863] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 
tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Releasing lock "refresh_cache-9717f586-cedc-4f21-9ea6-7bf6e2991327" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.218855] env[69994]: DEBUG nova.compute.manager [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Instance network_info: |[{"id": "c26583c0-ad13-4a74-adc9-deb6e535a3f0", "address": "fa:16:3e:18:8c:fa", "network": {"id": "2ae2b24b-719f-46ca-9855-58cc936f6147", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-94358001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "814a1036385043699ac1895c67d74188", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc26583c0-ad", "ovs_interfaceid": "c26583c0-ad13-4a74-adc9-deb6e535a3f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 672.219018] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:8c:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd2f5e5e2-e460-49ce-aa24-232e4a8007af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c26583c0-ad13-4a74-adc9-deb6e535a3f0', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 672.230493] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Creating folder: Project (814a1036385043699ac1895c67d74188). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.235370] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0793227-ca1a-4b7c-932b-14de8b27d5e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.254363] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Created folder: Project (814a1036385043699ac1895c67d74188) in parent group-v587342. 
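
The build activity above follows one repeating shape: the vmwareapi driver serializes work on a cached image by taking a lockutils lock named after the image's path under devstack-image-cache_base, checks the datastore for the cached VMDK (SearchDatastore_Task), creates the cache directory if it is missing (FileManager.MakeDirectory), releases the lock, and only then copies the cached disk into the instance's own folder. The sketch below illustrates that lock-then-check-then-copy pattern; it is a simplified illustration, not Nova's implementation, and search_datastore, make_directory and copy_virtual_disk are hypothetical stand-ins for the vCenter calls named in the log.

from oslo_concurrency import lockutils


def search_datastore(session, path):
    # Hypothetical stand-in for HostDatastoreBrowser.SearchDatastore_Task.
    return False


def make_directory(session, path):
    # Hypothetical stand-in for FileManager.MakeDirectory.
    pass


def copy_virtual_disk(session, src, dst):
    # Hypothetical stand-in for VirtualDiskManager.CopyVirtualDisk_Task.
    pass


def provision_root_disk(session, image_id, instance_uuid, ds_name="datastore2"):
    cache_dir = "[%s] devstack-image-cache_base" % ds_name
    cache_vmdk = "%s/%s/%s.vmdk" % (cache_dir, image_id, image_id)
    instance_vmdk = "[%s] %s/%s.vmdk" % (ds_name, instance_uuid, instance_uuid)

    # Serialize work on the cached image across concurrent builds; this is
    # the Acquiring/Acquired/Releasing lock sequence around the cache path.
    with lockutils.lock(cache_vmdk):
        if not search_datastore(session, cache_vmdk):
            make_directory(session, cache_dir)
            # In the real flow the image would be fetched into the cache here.

    # The cache lock is released before the long-running copy, so other
    # builds waiting on the same image are not blocked by it.
    copy_virtual_disk(session, cache_vmdk, instance_vmdk)
    return instance_vmdk

Releasing the cache lock before CopyVirtualDisk_Task matches the ordering in the log: the lock on the .vmdk path is released at 671.829, and the copy then starts and runs for roughly 0.77 seconds.
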
[ 672.254675] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Creating folder: Instances. Parent ref: group-v587391. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.254970] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da3ce927-9abb-4792-98ec-d928414c28c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.263179] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 672.271683] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Created folder: Instances in parent group-v587391. [ 672.271683] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 672.271683] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 672.271683] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd84ffd2-764b-4c9b-9029-f946bdf212f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.302684] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 672.302684] env[69994]: value = "task-2925084" [ 672.302684] env[69994]: _type = "Task" [ 672.302684] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.314614] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925084, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.356931] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-172198ec-7c14-496f-8a8b-24efe16aa4d5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 672.357624] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34f6ce72-cbef-4312-8e1e-42926148ee50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.373619] env[69994]: DEBUG oslo_vmware.api [None req-172198ec-7c14-496f-8a8b-24efe16aa4d5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 672.373619] env[69994]: value = "task-2925085" [ 672.373619] env[69994]: _type = "Task" [ 672.373619] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.383758] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925081, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.398110] env[69994]: DEBUG oslo_vmware.api [None req-172198ec-7c14-496f-8a8b-24efe16aa4d5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925085, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.497934] env[69994]: DEBUG nova.scheduler.client.report [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 672.522323] env[69994]: DEBUG nova.network.neutron [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Updated VIF entry in instance network info cache for port 68b29b35-015e-4545-af50-70655d1914db. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 672.522557] env[69994]: DEBUG nova.network.neutron [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Updating instance_info_cache with network_info: [{"id": "68b29b35-015e-4545-af50-70655d1914db", "address": "fa:16:3e:fb:a3:d3", "network": {"id": "51754dc8-5636-4471-80e9-79d9743ed5d8", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-735809087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a27eadbf075948a38f37e6a97f1db130", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68b29b35-01", "ovs_interfaceid": "68b29b35-015e-4545-af50-70655d1914db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.588032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Releasing lock "refresh_cache-316ab41e-d3c1-4cef-8d63-a138e21d0ea3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.588515] env[69994]: DEBUG nova.compute.manager [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 672.588713] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 672.589683] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d3eb8c-e6a1-4486-b7ab-516f8d1d7549 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.606276] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 672.606276] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2505be7-bdd4-4c1e-97b7-53dd49da99c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.615053] env[69994]: DEBUG oslo_vmware.api [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 672.615053] env[69994]: value = "task-2925086" [ 672.615053] env[69994]: _type = "Task" [ 672.615053] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.634352] env[69994]: DEBUG oslo_vmware.api [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925086, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.793638] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.820964] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925084, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.873599] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925081, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.768688} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.873836] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6/8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 672.873922] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 672.874222] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bda91af8-d792-4b0f-b99a-76827a6ccf72 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.891178] env[69994]: DEBUG oslo_vmware.api [None req-172198ec-7c14-496f-8a8b-24efe16aa4d5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925085, 'name': PowerOffVM_Task, 'duration_secs': 0.386748} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.891517] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 672.891517] env[69994]: value = "task-2925087" [ 672.891517] env[69994]: _type = "Task" [ 672.891517] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.894745] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-172198ec-7c14-496f-8a8b-24efe16aa4d5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 672.895076] env[69994]: DEBUG nova.compute.manager [None req-172198ec-7c14-496f-8a8b-24efe16aa4d5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 672.898559] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8a9a0b-ade4-4c72-a725-2e7ab62f3f01 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.913680] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925087, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.009137] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.127s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.009137] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.929s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.009545] env[69994]: DEBUG nova.objects.instance [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Lazy-loading 'resources' on Instance uuid 317e3366-4aec-4c80-bcf9-df84bc5e9939 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 673.025709] env[69994]: DEBUG oslo_concurrency.lockutils [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] Releasing lock "refresh_cache-9e9973e1-feb8-4fd7-95ae-e6d824af5a64" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.029025] env[69994]: DEBUG nova.compute.manager [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Received event network-vif-plugged-c26583c0-ad13-4a74-adc9-deb6e535a3f0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 673.029025] env[69994]: DEBUG oslo_concurrency.lockutils [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] Acquiring lock "9717f586-cedc-4f21-9ea6-7bf6e2991327-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.029025] env[69994]: DEBUG oslo_concurrency.lockutils [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] Lock "9717f586-cedc-4f21-9ea6-7bf6e2991327-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.029025] env[69994]: DEBUG oslo_concurrency.lockutils [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] Lock "9717f586-cedc-4f21-9ea6-7bf6e2991327-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.029025] env[69994]: DEBUG nova.compute.manager [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] No waiting events found dispatching network-vif-plugged-c26583c0-ad13-4a74-adc9-deb6e535a3f0 {{(pid=69994) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 673.029289] env[69994]: WARNING nova.compute.manager [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Received unexpected event network-vif-plugged-c26583c0-ad13-4a74-adc9-deb6e535a3f0 for instance with vm_state building and task_state spawning. [ 673.029289] env[69994]: DEBUG nova.compute.manager [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Received event network-changed-c26583c0-ad13-4a74-adc9-deb6e535a3f0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 673.029289] env[69994]: DEBUG nova.compute.manager [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Refreshing instance network info cache due to event network-changed-c26583c0-ad13-4a74-adc9-deb6e535a3f0. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 673.029289] env[69994]: DEBUG oslo_concurrency.lockutils [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] Acquiring lock "refresh_cache-9717f586-cedc-4f21-9ea6-7bf6e2991327" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.029289] env[69994]: DEBUG oslo_concurrency.lockutils [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] Acquired lock "refresh_cache-9717f586-cedc-4f21-9ea6-7bf6e2991327" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.029455] env[69994]: DEBUG nova.network.neutron [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Refreshing network info cache for port c26583c0-ad13-4a74-adc9-deb6e535a3f0 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 673.036545] env[69994]: INFO nova.scheduler.client.report [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Deleted allocations for instance 1232f601-3339-4fc2-92b2-aa550af90b01 [ 673.102907] env[69994]: DEBUG nova.network.neutron [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating instance_info_cache with network_info: [{"id": "6634b7a0-01a3-49e4-a7ac-6f8572d86925", "address": "fa:16:3e:dc:4a:90", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": 
"nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6634b7a0-01", "ovs_interfaceid": "6634b7a0-01a3-49e4-a7ac-6f8572d86925", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.128049] env[69994]: DEBUG oslo_vmware.api [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925086, 'name': PowerOffVM_Task, 'duration_secs': 0.171819} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.128602] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 673.128927] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 673.129398] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd3ea9bc-427f-4688-959f-62bc946cf08a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.167069] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 673.167069] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 673.167069] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Deleting the datastore file [datastore2] 316ab41e-d3c1-4cef-8d63-a138e21d0ea3 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 673.167069] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75127041-61e5-4542-8b2a-26a095e54e57 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.174945] env[69994]: DEBUG oslo_vmware.api [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for the task: (returnval){ [ 673.174945] env[69994]: value = "task-2925089" [ 673.174945] env[69994]: _type = "Task" [ 673.174945] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.187803] env[69994]: DEBUG oslo_vmware.api [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925089, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.317544] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925084, 'name': CreateVM_Task, 'duration_secs': 0.691049} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.317800] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 673.319063] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.319372] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.319890] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 673.320286] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f71ed02e-d149-4f68-86e9-41d0a62285a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.328193] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Waiting for the task: (returnval){ [ 673.328193] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52aef1e7-8a5a-f295-1baa-c43e28f5bd5b" [ 673.328193] env[69994]: _type = "Task" [ 673.328193] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.343550] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52aef1e7-8a5a-f295-1baa-c43e28f5bd5b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.405973] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925087, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109663} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.406300] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 673.407114] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4824d348-8676-48c5-995b-a964e03c45ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.436328] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6/8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 673.437017] env[69994]: DEBUG oslo_concurrency.lockutils [None req-172198ec-7c14-496f-8a8b-24efe16aa4d5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.112s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.440861] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58016385-2489-487a-8c70-014fff2a8dfb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.461130] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 673.461130] env[69994]: value = "task-2925090" [ 673.461130] env[69994]: _type = "Task" [ 673.461130] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.471452] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925090, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.546867] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cfec4e-9e79-4b8f-90a3-ea31e4cb27f0 tempest-ServerDiagnosticsNegativeTest-2057812385 tempest-ServerDiagnosticsNegativeTest-2057812385-project-member] Lock "1232f601-3339-4fc2-92b2-aa550af90b01" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.045s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.607338] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.687197] env[69994]: DEBUG oslo_vmware.api [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Task: {'id': task-2925089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105386} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.689820] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 673.690022] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 673.690203] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 673.690371] env[69994]: INFO nova.compute.manager [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Took 1.10 seconds to destroy the instance on the hypervisor. [ 673.690664] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 673.692881] env[69994]: DEBUG nova.compute.manager [-] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 673.692881] env[69994]: DEBUG nova.network.neutron [-] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 673.716250] env[69994]: DEBUG nova.network.neutron [-] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.839370] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52aef1e7-8a5a-f295-1baa-c43e28f5bd5b, 'name': SearchDatastore_Task, 'duration_secs': 0.011234} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.844805] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.844805] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 673.845031] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.845031] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.845607] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.845658] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-928f559e-794a-4e96-93db-dd38238280e4 
{{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.864031] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.864031] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 673.864031] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36da8f1b-8f84-4811-860f-9307a1465962 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.873102] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Waiting for the task: (returnval){ [ 673.873102] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5295a48f-98a4-8a1d-e719-46d02c1abd9b" [ 673.873102] env[69994]: _type = "Task" [ 673.873102] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.883249] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5295a48f-98a4-8a1d-e719-46d02c1abd9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.916675] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.916675] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.970554] env[69994]: DEBUG nova.network.neutron [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Updated VIF entry in instance network info cache for port c26583c0-ad13-4a74-adc9-deb6e535a3f0. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 673.971186] env[69994]: DEBUG nova.network.neutron [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Updating instance_info_cache with network_info: [{"id": "c26583c0-ad13-4a74-adc9-deb6e535a3f0", "address": "fa:16:3e:18:8c:fa", "network": {"id": "2ae2b24b-719f-46ca-9855-58cc936f6147", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-94358001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "814a1036385043699ac1895c67d74188", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc26583c0-ad", "ovs_interfaceid": "c26583c0-ad13-4a74-adc9-deb6e535a3f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.975332] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925090, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.012934] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54aac9b5-b1cb-4408-a9bc-d318bb3ce551 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.021895] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b991aa7b-4a0a-4d22-8901-7b751ade9f5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.055898] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3c5918-5350-4517-94b5-fe67cc742203 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.065137] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac3b5f5-2e14-45ac-9d8f-b76ca5b5453f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.083286] env[69994]: DEBUG nova.compute.provider_tree [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.132237] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-409aa745-3940-427b-89d3-806a95250628 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.157993] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3901057-0608-49c4-a0a0-e7b072360212 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.167280] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating instance '63d6a59a-d58c-4179-ad39-eb9863e6f84c' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 674.219550] env[69994]: DEBUG nova.network.neutron [-] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.389513] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5295a48f-98a4-8a1d-e719-46d02c1abd9b, 'name': SearchDatastore_Task, 'duration_secs': 0.027017} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.391133] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2410d05-8f25-4669-b8e2-a6aabd5a7285 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.399468] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Waiting for the task: (returnval){ [ 674.399468] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d7a68d-e138-e0e8-e874-6dbfb0154415" [ 674.399468] env[69994]: _type = "Task" [ 674.399468] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.412130] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d7a68d-e138-e0e8-e874-6dbfb0154415, 'name': SearchDatastore_Task} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.412483] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.412803] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 9717f586-cedc-4f21-9ea6-7bf6e2991327/9717f586-cedc-4f21-9ea6-7bf6e2991327.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 674.413232] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55267d63-a95c-44b7-bb11-af616dcbd26a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.422623] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Waiting for the task: (returnval){ [ 674.422623] env[69994]: value = "task-2925091" [ 674.422623] env[69994]: _type = "Task" [ 674.422623] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.441698] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925091, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.472857] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925090, 'name': ReconfigVM_Task, 'duration_secs': 0.991384} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.473387] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Reconfigured VM instance instance-00000010 to attach disk [datastore2] 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6/8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 674.474079] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8ac0002-68b9-4e93-b620-82fda7280ed7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.476072] env[69994]: DEBUG oslo_concurrency.lockutils [req-119164f2-4b34-44a5-8c55-0a1e13a5923f req-79d888ec-a836-4fb7-bba7-2457f671ea7b service nova] Releasing lock "refresh_cache-9717f586-cedc-4f21-9ea6-7bf6e2991327" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.484368] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 674.484368] env[69994]: value = "task-2925092" [ 674.484368] env[69994]: _type = "Task" [ 674.484368] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.500571] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925092, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.589233] env[69994]: DEBUG nova.scheduler.client.report [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 674.678497] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 674.679228] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a378efa1-edcf-473b-aad7-89939c041120 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.689988] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 674.689988] env[69994]: value = "task-2925093" [ 674.689988] env[69994]: _type = "Task" [ 674.689988] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.702332] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925093, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.725132] env[69994]: INFO nova.compute.manager [-] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Took 1.03 seconds to deallocate network for instance. [ 674.934802] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925091, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492939} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.935199] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 9717f586-cedc-4f21-9ea6-7bf6e2991327/9717f586-cedc-4f21-9ea6-7bf6e2991327.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 674.935299] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 674.935556] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d1591274-a718-448f-9235-576849373491 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.949727] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Waiting for the task: (returnval){ [ 674.949727] env[69994]: value = "task-2925094" [ 674.949727] env[69994]: _type = "Task" [ 674.949727] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.960442] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925094, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.999415] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925092, 'name': Rename_Task, 'duration_secs': 0.182979} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.999740] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 674.999980] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9fb3fb5a-edd7-47ba-8311-bfc822ab9688 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.011767] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 675.011767] env[69994]: value = "task-2925095" [ 675.011767] env[69994]: _type = "Task" [ 675.011767] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.021672] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925095, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.101690] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.090s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.103473] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.195s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.108216] env[69994]: INFO nova.compute.claims [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 675.144113] env[69994]: INFO nova.scheduler.client.report [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Deleted allocations for instance 317e3366-4aec-4c80-bcf9-df84bc5e9939 [ 675.207423] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925093, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.232050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.463277] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925094, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078253} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.463611] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 675.464443] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c6515a-1dc2-4569-af72-69c74ebd6587 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.495196] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 9717f586-cedc-4f21-9ea6-7bf6e2991327/9717f586-cedc-4f21-9ea6-7bf6e2991327.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 675.496106] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4cfef1e-e1a1-41a1-95bb-69b74204350f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.519503] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Waiting for the task: (returnval){ [ 675.519503] env[69994]: value = "task-2925096" [ 675.519503] env[69994]: _type = "Task" [ 675.519503] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.525206] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925095, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.537627] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925096, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.663314] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4c998281-4105-4d05-ac00-645fd2a1323f tempest-TenantUsagesTestJSON-1894317245 tempest-TenantUsagesTestJSON-1894317245-project-member] Lock "317e3366-4aec-4c80-bcf9-df84bc5e9939" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.141s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.713429] env[69994]: DEBUG oslo_vmware.api [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925093, 'name': PowerOnVM_Task, 'duration_secs': 0.54863} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.714939] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 675.714939] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd834b3-efbf-435e-aa1c-64c52d46cd2b tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating instance '63d6a59a-d58c-4179-ad39-eb9863e6f84c' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 676.031958] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925095, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.041750] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925096, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.202242] env[69994]: DEBUG nova.compute.manager [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 676.202889] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa4fd3c-bcf6-4e02-b18b-82022fe1fd8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.533492] env[69994]: DEBUG oslo_vmware.api [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925095, 'name': PowerOnVM_Task, 'duration_secs': 1.472076} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.533492] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 676.533492] env[69994]: INFO nova.compute.manager [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Took 10.72 seconds to spawn the instance on the hypervisor. [ 676.533492] env[69994]: DEBUG nova.compute.manager [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 676.536851] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0074a93-dfaa-46f6-93a9-aa8813c49f84 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.553680] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925096, 'name': ReconfigVM_Task, 'duration_secs': 0.89318} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.554202] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 9717f586-cedc-4f21-9ea6-7bf6e2991327/9717f586-cedc-4f21-9ea6-7bf6e2991327.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 676.554815] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b61656fc-d4cc-420d-a5f9-7220141a4a9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.563766] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Waiting for the task: (returnval){ [ 676.563766] env[69994]: value = "task-2925097" [ 676.563766] env[69994]: _type = "Task" [ 676.563766] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.576260] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925097, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.675996] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685b208b-2708-4033-847a-06a939dceafb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.686364] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9696f9-6ee9-4378-aa38-37c004ae8df1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.723199] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f11100-64f0-4f1c-8252-65cc16e66bf8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.729285] env[69994]: INFO nova.compute.manager [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] instance snapshotting [ 676.729285] env[69994]: WARNING nova.compute.manager [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 676.743051] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3a8dd4-d8f3-4453-8811-7a38f6f520f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.749459] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aad5252-37fb-4635-9f21-77504a2f3f5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.764868] env[69994]: DEBUG nova.compute.provider_tree [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.782092] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80390ecb-74fb-415e-8364-117ed13b34c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.062978] env[69994]: INFO nova.compute.manager [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Took 35.11 seconds to build instance. [ 677.081825] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925097, 'name': Rename_Task, 'duration_secs': 0.266058} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.083049] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 677.083722] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5be5e117-50e5-49cc-ab80-67b1456dce29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.092056] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Waiting for the task: (returnval){ [ 677.092056] env[69994]: value = "task-2925098" [ 677.092056] env[69994]: _type = "Task" [ 677.092056] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.103650] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925098, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.290349] env[69994]: DEBUG nova.scheduler.client.report [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 677.295333] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 677.295926] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f78cc5af-e4f8-44c7-9e34-2c02a06f7c8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.308594] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 677.308594] env[69994]: value = "task-2925099" [ 677.308594] env[69994]: _type = "Task" [ 677.308594] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.321228] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925099, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.565276] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c98f63a1-62fa-41fc-80c7-da9ab97fe14e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.618s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.605946] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925098, 'name': PowerOnVM_Task} progress is 90%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.799138] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.695s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.800256] env[69994]: DEBUG nova.compute.manager [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 677.808189] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.164s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.809845] env[69994]: INFO nova.compute.claims [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 677.841208] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925099, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.070740] env[69994]: DEBUG nova.compute.manager [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 678.115701] env[69994]: DEBUG oslo_vmware.api [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925098, 'name': PowerOnVM_Task, 'duration_secs': 0.629016} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.115998] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 678.116280] env[69994]: INFO nova.compute.manager [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Took 9.66 seconds to spawn the instance on the hypervisor. [ 678.116463] env[69994]: DEBUG nova.compute.manager [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.117276] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65885df4-b156-4592-a012-05d04b1183a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.331472] env[69994]: DEBUG nova.compute.utils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 678.335336] env[69994]: DEBUG nova.compute.manager [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 678.335336] env[69994]: DEBUG nova.network.neutron [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 678.355431] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925099, 'name': CreateSnapshot_Task, 'duration_secs': 0.63778} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.356458] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 678.357312] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8739144-6b86-4637-a5b1-e14d5dc7cc0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.411700] env[69994]: DEBUG nova.policy [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fec48b3a663741fe9c701ab7e2105b29', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa48a4bba9de4f50b8ed79f61dd5d4fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 678.417982] env[69994]: DEBUG oslo_concurrency.lockutils [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.417982] env[69994]: DEBUG oslo_concurrency.lockutils [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.417982] env[69994]: DEBUG nova.compute.manager [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Going to confirm migration 1 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 678.599848] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.641209] env[69994]: INFO nova.compute.manager [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Took 36.12 seconds to build instance. 
[ 678.836582] env[69994]: DEBUG nova.compute.manager [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 678.853143] env[69994]: DEBUG nova.compute.manager [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.854021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45da02f8-3d71-46e6-922c-743c3b3c56b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.881091] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 678.881640] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-09376a82-a7cc-41d2-bb29-1167a1174bc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.893765] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 678.893765] env[69994]: value = "task-2925100" [ 678.893765] env[69994]: _type = "Task" [ 678.893765] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.911361] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925100, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.095694] env[69994]: DEBUG oslo_concurrency.lockutils [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.098982] env[69994]: DEBUG oslo_concurrency.lockutils [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.098982] env[69994]: DEBUG nova.network.neutron [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.098982] env[69994]: DEBUG nova.objects.instance [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lazy-loading 'info_cache' on Instance uuid 63d6a59a-d58c-4179-ad39-eb9863e6f84c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 679.142193] env[69994]: DEBUG nova.network.neutron [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Successfully created port: 35726e36-0b23-4204-b28b-90bc44467363 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 679.147094] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c68388d4-0375-46d4-bb84-2747c62605e8 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Lock "9717f586-cedc-4f21-9ea6-7bf6e2991327" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.899s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.383789] env[69994]: INFO nova.compute.manager [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] instance snapshotting [ 679.388545] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e75cd9e-3f70-45d2-82f5-f0d948f907ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.428115] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44288aa2-1cb0-454e-bdba-ddc1045445a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.434375] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45716ab1-be91-43a4-828e-4c7e5f248d64 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.449100] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3a0da1-7133-450f-a859-b8dea0428cfc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.452550] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925100, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.488027] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d867f71-4106-4a26-b651-2495e3615364 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.498275] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318d0472-59fd-42f2-8e58-10d930cb0b27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.513478] env[69994]: DEBUG nova.compute.provider_tree [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.653128] env[69994]: DEBUG nova.compute.manager [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 679.850938] env[69994]: DEBUG nova.compute.manager [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 679.886361] env[69994]: DEBUG nova.virt.hardware [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 679.886686] env[69994]: DEBUG nova.virt.hardware [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 679.886848] env[69994]: DEBUG nova.virt.hardware [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 679.888338] env[69994]: DEBUG nova.virt.hardware [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 679.888557] env[69994]: DEBUG nova.virt.hardware [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 679.888715] env[69994]: DEBUG nova.virt.hardware [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 679.888936] env[69994]: DEBUG nova.virt.hardware [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 679.889112] env[69994]: DEBUG nova.virt.hardware [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 679.889281] env[69994]: DEBUG nova.virt.hardware [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 679.889487] env[69994]: DEBUG nova.virt.hardware [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 679.889612] env[69994]: DEBUG nova.virt.hardware [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 679.890587] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072d43ca-2ddf-43b2-b62c-a6560e0564fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.901550] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4755ca7f-d837-441c-8f48-74a54306921a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.931485] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925100, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.957591] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 679.957932] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ffa24255-ed87-44fe-b341-cc61dab2d70a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.967780] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 679.967780] env[69994]: value = "task-2925101" [ 679.967780] env[69994]: _type = "Task" [ 679.967780] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.978144] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925101, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.017080] env[69994]: DEBUG nova.scheduler.client.report [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 680.175454] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.437081] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925100, 'name': CloneVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.479484] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925101, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.524148] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.524148] env[69994]: DEBUG nova.compute.manager [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 680.530366] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.260s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.530754] env[69994]: INFO nova.compute.claims [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 680.895546] env[69994]: DEBUG nova.network.neutron [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating instance_info_cache with network_info: [{"id": "6634b7a0-01a3-49e4-a7ac-6f8572d86925", "address": "fa:16:3e:dc:4a:90", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6634b7a0-01", "ovs_interfaceid": "6634b7a0-01a3-49e4-a7ac-6f8572d86925", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.936293] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925100, 'name': CloneVM_Task, 'duration_secs': 1.657213} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.936650] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Created linked-clone VM from snapshot [ 680.937433] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9c6ffd-722f-4613-9533-889dc016a284 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.948033] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Uploading image 3b72b40e-7308-45f8-b5ff-72de11dda04c {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 680.986029] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925101, 'name': CreateSnapshot_Task, 'duration_secs': 0.701277} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.989418] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 680.989418] env[69994]: value = "vm-587395" [ 680.989418] env[69994]: _type = "VirtualMachine" [ 680.989418] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 680.990276] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 680.991368] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f1268bb9-d1fd-4fb4-bf4f-5592b8770dec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.994409] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f00194-a901-4598-b430-f86ce5075a89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.014391] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lease: (returnval){ [ 681.014391] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52926989-b1be-5b35-46b0-01a066f988fc" [ 681.014391] env[69994]: _type = "HttpNfcLease" [ 681.014391] env[69994]: } obtained for exporting VM: (result){ [ 681.014391] env[69994]: value = "vm-587395" [ 681.014391] env[69994]: _type = "VirtualMachine" [ 681.014391] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 681.015421] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the lease: (returnval){ [ 681.015421] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52926989-b1be-5b35-46b0-01a066f988fc" [ 681.015421] env[69994]: _type = "HttpNfcLease" [ 681.015421] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 681.023353] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 681.023353] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52926989-b1be-5b35-46b0-01a066f988fc" [ 681.023353] env[69994]: _type = "HttpNfcLease" [ 681.023353] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 681.037316] env[69994]: DEBUG nova.compute.utils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 681.044095] env[69994]: DEBUG nova.compute.manager [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 681.044095] env[69994]: DEBUG nova.network.neutron [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 681.048272] env[69994]: DEBUG nova.compute.manager [req-be074984-5371-4811-b9f3-6c19510191e0 req-2abe207c-9b2b-4e45-8d32-ad4261029cce service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Received event network-changed-1acb2297-91d5-4675-bbec-1c950d6cd544 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 681.048272] env[69994]: DEBUG nova.compute.manager [req-be074984-5371-4811-b9f3-6c19510191e0 req-2abe207c-9b2b-4e45-8d32-ad4261029cce service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Refreshing instance network info cache due to event network-changed-1acb2297-91d5-4675-bbec-1c950d6cd544. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 681.048272] env[69994]: DEBUG oslo_concurrency.lockutils [req-be074984-5371-4811-b9f3-6c19510191e0 req-2abe207c-9b2b-4e45-8d32-ad4261029cce service nova] Acquiring lock "refresh_cache-8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.048272] env[69994]: DEBUG oslo_concurrency.lockutils [req-be074984-5371-4811-b9f3-6c19510191e0 req-2abe207c-9b2b-4e45-8d32-ad4261029cce service nova] Acquired lock "refresh_cache-8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.048272] env[69994]: DEBUG nova.network.neutron [req-be074984-5371-4811-b9f3-6c19510191e0 req-2abe207c-9b2b-4e45-8d32-ad4261029cce service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Refreshing network info cache for port 1acb2297-91d5-4675-bbec-1c950d6cd544 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 681.116975] env[69994]: DEBUG nova.policy [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba9ebda63ed842cea7d7a8c4333e2c28', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29874baa31194323bf3566aa52711e4c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 681.323229] env[69994]: DEBUG nova.network.neutron [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Successfully updated port: 35726e36-0b23-4204-b28b-90bc44467363 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 681.401018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "refresh_cache-63d6a59a-d58c-4179-ad39-eb9863e6f84c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.401018] env[69994]: DEBUG nova.objects.instance [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lazy-loading 'migration_context' on Instance uuid 63d6a59a-d58c-4179-ad39-eb9863e6f84c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 681.502474] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Acquiring lock "9717f586-cedc-4f21-9ea6-7bf6e2991327" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.502896] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Lock "9717f586-cedc-4f21-9ea6-7bf6e2991327" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.502896] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Acquiring lock "9717f586-cedc-4f21-9ea6-7bf6e2991327-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.503221] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Lock "9717f586-cedc-4f21-9ea6-7bf6e2991327-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.503221] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Lock "9717f586-cedc-4f21-9ea6-7bf6e2991327-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.505851] env[69994]: INFO nova.compute.manager [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Terminating instance [ 681.523696] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 681.524958] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6cbe3a10-48e2-4944-abcb-bfdde4046acc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.535565] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 681.535565] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52926989-b1be-5b35-46b0-01a066f988fc" [ 681.535565] env[69994]: _type = "HttpNfcLease" [ 681.535565] env[69994]: } is ready. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 681.536631] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 681.536631] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52926989-b1be-5b35-46b0-01a066f988fc" [ 681.536631] env[69994]: _type = "HttpNfcLease" [ 681.536631] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 681.536969] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 681.536969] env[69994]: value = "task-2925103" [ 681.536969] env[69994]: _type = "Task" [ 681.536969] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.539633] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7775e3f7-6908-406e-a354-03802829299f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.554695] env[69994]: DEBUG nova.compute.manager [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 681.560816] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52162278-8bf8-5570-3341-55822d951beb/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 681.560816] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52162278-8bf8-5570-3341-55822d951beb/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 681.567428] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925103, 'name': CloneVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.637149] env[69994]: DEBUG nova.network.neutron [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Successfully created port: 02be7506-27b5-4ccf-93a0-19b365247a08 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 681.640132] env[69994]: DEBUG nova.compute.manager [req-7ff11b34-d3ca-4131-98aa-e4f4dbc8b8ab req-206133e4-1077-4211-8235-0e9a826f5bd1 service nova] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Received event network-vif-plugged-35726e36-0b23-4204-b28b-90bc44467363 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 681.640300] env[69994]: DEBUG oslo_concurrency.lockutils [req-7ff11b34-d3ca-4131-98aa-e4f4dbc8b8ab req-206133e4-1077-4211-8235-0e9a826f5bd1 service nova] Acquiring lock "6aacfc4e-32b4-40d7-8240-e4449cf78925-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.640664] env[69994]: DEBUG oslo_concurrency.lockutils [req-7ff11b34-d3ca-4131-98aa-e4f4dbc8b8ab req-206133e4-1077-4211-8235-0e9a826f5bd1 service nova] Lock "6aacfc4e-32b4-40d7-8240-e4449cf78925-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.640664] env[69994]: DEBUG oslo_concurrency.lockutils [req-7ff11b34-d3ca-4131-98aa-e4f4dbc8b8ab req-206133e4-1077-4211-8235-0e9a826f5bd1 service nova] Lock "6aacfc4e-32b4-40d7-8240-e4449cf78925-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.640890] env[69994]: DEBUG nova.compute.manager [req-7ff11b34-d3ca-4131-98aa-e4f4dbc8b8ab req-206133e4-1077-4211-8235-0e9a826f5bd1 service nova] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] No waiting events found dispatching network-vif-plugged-35726e36-0b23-4204-b28b-90bc44467363 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 681.641026] env[69994]: WARNING nova.compute.manager [req-7ff11b34-d3ca-4131-98aa-e4f4dbc8b8ab req-206133e4-1077-4211-8235-0e9a826f5bd1 service nova] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Received unexpected event network-vif-plugged-35726e36-0b23-4204-b28b-90bc44467363 for instance with vm_state building and task_state spawning. 
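The CreateSnapshot_Task and CloneVM_Task entries above are produced by oslo.vmware's task-polling loop (wait_for_task logging "progress is N%" until the task completes). A minimal sketch of that calling pattern, assuming a reachable vCenter and a vm_ref managed-object reference obtained elsewhere; the host, credentials and snapshot name below are placeholders, not values from this log:

    from oslo_vmware import api

    # Placeholder connection details; Nova builds this session from its
    # [vmware] configuration section rather than literals like these.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Start a snapshot task on an existing VM and block until it finishes.
    # wait_for_task() is what emits the periodic "progress is N%" debug lines.
    task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                              name='example-snapshot', description='',
                              memory=False, quiesce=False)
    task_info = session.wait_for_task(task)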
[ 681.723423] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a10ea881-3560-490a-ba86-6b9899c4b5f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.828523] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "refresh_cache-6aacfc4e-32b4-40d7-8240-e4449cf78925" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.828523] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquired lock "refresh_cache-6aacfc4e-32b4-40d7-8240-e4449cf78925" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.828523] env[69994]: DEBUG nova.network.neutron [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 681.908708] env[69994]: DEBUG nova.objects.base [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Object Instance<63d6a59a-d58c-4179-ad39-eb9863e6f84c> lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 681.910203] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e842bee7-376d-4962-a748-5599f583caf9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.936672] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd31683d-b7f0-4e21-ba89-b1d8313677b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.944840] env[69994]: DEBUG oslo_vmware.api [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 681.944840] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f7411e-f94b-9286-dc13-f6a2af7efd3e" [ 681.944840] env[69994]: _type = "Task" [ 681.944840] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.953969] env[69994]: DEBUG oslo_vmware.api [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f7411e-f94b-9286-dc13-f6a2af7efd3e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.009423] env[69994]: DEBUG nova.compute.manager [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 682.009682] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 682.010614] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14adca08-e802-4587-a4ea-fbf012af80e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.021876] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 682.022187] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a7088fb-ab85-4a45-8d0e-9cfda80c809c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.032539] env[69994]: DEBUG oslo_vmware.api [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Waiting for the task: (returnval){ [ 682.032539] env[69994]: value = "task-2925104" [ 682.032539] env[69994]: _type = "Task" [ 682.032539] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.041314] env[69994]: DEBUG oslo_vmware.api [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925104, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.057670] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925103, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.122476] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e2af9f-d97c-4f82-9ad1-e57a3d804e43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.125949] env[69994]: DEBUG nova.network.neutron [req-be074984-5371-4811-b9f3-6c19510191e0 req-2abe207c-9b2b-4e45-8d32-ad4261029cce service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Updated VIF entry in instance network info cache for port 1acb2297-91d5-4675-bbec-1c950d6cd544. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 682.127086] env[69994]: DEBUG nova.network.neutron [req-be074984-5371-4811-b9f3-6c19510191e0 req-2abe207c-9b2b-4e45-8d32-ad4261029cce service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Updating instance_info_cache with network_info: [{"id": "1acb2297-91d5-4675-bbec-1c950d6cd544", "address": "fa:16:3e:1e:06:87", "network": {"id": "596cb0a2-fc6e-400a-89ef-dcae93a7ee7b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1499088312-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd7386da3414f198142cee5c6d383b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f44b2fa3-6730-4b87-8839-947eff21213f", "external-id": "nsx-vlan-transportzone-984", "segmentation_id": 984, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1acb2297-91", "ovs_interfaceid": "1acb2297-91d5-4675-bbec-1c950d6cd544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.135160] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c4efa9-0417-4d3b-81f7-62fe25a58d70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.177769] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2190ac05-fdaa-4150-acc7-021f1ced42cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.187671] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824c0f08-cb54-4782-a825-300b974a6df4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.208419] env[69994]: DEBUG nova.compute.provider_tree [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.396858] env[69994]: DEBUG nova.network.neutron [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.458489] env[69994]: DEBUG oslo_vmware.api [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f7411e-f94b-9286-dc13-f6a2af7efd3e, 'name': SearchDatastore_Task, 'duration_secs': 0.012305} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.459398] env[69994]: DEBUG oslo_concurrency.lockutils [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.543250] env[69994]: DEBUG oslo_vmware.api [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925104, 'name': PowerOffVM_Task, 'duration_secs': 0.259345} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.543627] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 682.544287] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 682.544407] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9834001-f040-4659-a7fe-b8e239038a0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.555179] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925103, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.570910] env[69994]: DEBUG nova.compute.manager [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 682.608503] env[69994]: DEBUG nova.virt.hardware [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 682.608689] env[69994]: DEBUG nova.virt.hardware [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 682.608724] env[69994]: DEBUG nova.virt.hardware [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 682.608955] env[69994]: DEBUG nova.virt.hardware [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 682.609155] env[69994]: DEBUG nova.virt.hardware [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 682.609365] env[69994]: DEBUG nova.virt.hardware [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 682.609680] env[69994]: DEBUG nova.virt.hardware [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 682.609947] env[69994]: DEBUG nova.virt.hardware [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 
tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 682.610214] env[69994]: DEBUG nova.virt.hardware [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 682.610498] env[69994]: DEBUG nova.virt.hardware [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 682.610792] env[69994]: DEBUG nova.virt.hardware [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 682.611844] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eda9061-19da-4f4c-ae0f-fbf4477ff951 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.615577] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 682.615776] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 682.615957] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Deleting the datastore file [datastore1] 9717f586-cedc-4f21-9ea6-7bf6e2991327 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 682.616680] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3b72d02-8507-4fb6-9a25-93171cadeb33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.628654] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e732011-1c91-4bb0-ba98-256016cafd0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.635180] env[69994]: DEBUG nova.network.neutron [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Updating 
instance_info_cache with network_info: [{"id": "35726e36-0b23-4204-b28b-90bc44467363", "address": "fa:16:3e:d4:70:2c", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35726e36-0b", "ovs_interfaceid": "35726e36-0b23-4204-b28b-90bc44467363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.636538] env[69994]: DEBUG oslo_concurrency.lockutils [req-be074984-5371-4811-b9f3-6c19510191e0 req-2abe207c-9b2b-4e45-8d32-ad4261029cce service nova] Releasing lock "refresh_cache-8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.638991] env[69994]: DEBUG oslo_vmware.api [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Waiting for the task: (returnval){ [ 682.638991] env[69994]: value = "task-2925106" [ 682.638991] env[69994]: _type = "Task" [ 682.638991] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.655264] env[69994]: DEBUG oslo_vmware.api [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925106, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.711527] env[69994]: DEBUG nova.scheduler.client.report [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 682.952510] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "ab320e59-febb-4f8f-9bc4-74227d29c752" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.952784] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.070532] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925103, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.139934] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Releasing lock "refresh_cache-6aacfc4e-32b4-40d7-8240-e4449cf78925" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.140122] env[69994]: DEBUG nova.compute.manager [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Instance network_info: |[{"id": "35726e36-0b23-4204-b28b-90bc44467363", "address": "fa:16:3e:d4:70:2c", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35726e36-0b", "ovs_interfaceid": "35726e36-0b23-4204-b28b-90bc44467363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 683.144413] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:70:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35726e36-0b23-4204-b28b-90bc44467363', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 683.155682] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Creating folder: Project (aa48a4bba9de4f50b8ed79f61dd5d4fd). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 683.156250] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01a31c5b-f2de-434b-b899-2e02bd272bef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.166571] env[69994]: DEBUG oslo_vmware.api [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Task: {'id': task-2925106, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251051} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.166571] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 683.166571] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 683.166571] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 683.166571] env[69994]: INFO nova.compute.manager [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Took 1.16 seconds to destroy the instance on the hypervisor. [ 683.166827] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 683.170217] env[69994]: DEBUG nova.compute.manager [-] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 683.170217] env[69994]: DEBUG nova.network.neutron [-] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 683.171481] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Created folder: Project (aa48a4bba9de4f50b8ed79f61dd5d4fd) in parent group-v587342. 
[ 683.171481] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Creating folder: Instances. Parent ref: group-v587398. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 683.171954] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09564975-853b-4618-b75b-959b1605a4a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.184086] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Created folder: Instances in parent group-v587398. [ 683.184525] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 683.184921] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 683.185285] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-156f755e-f964-4a78-8fb1-a2747d9de7b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.212744] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 683.212744] env[69994]: value = "task-2925109" [ 683.212744] env[69994]: _type = "Task" [ 683.212744] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.222177] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.227101] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925109, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.227879] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.227s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.229156] env[69994]: DEBUG nova.objects.instance [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 683.475230] env[69994]: DEBUG nova.compute.manager [req-b7b6c990-c6d7-4a4a-9d14-bc127a876ce2 req-fd13d55c-a6dd-4c60-ba61-f7b4ced7ab6d service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Received event network-vif-plugged-02be7506-27b5-4ccf-93a0-19b365247a08 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 683.475230] env[69994]: DEBUG oslo_concurrency.lockutils [req-b7b6c990-c6d7-4a4a-9d14-bc127a876ce2 req-fd13d55c-a6dd-4c60-ba61-f7b4ced7ab6d service nova] Acquiring lock "70e5674d-4627-4720-9b87-955c2749e010-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.478158] env[69994]: DEBUG oslo_concurrency.lockutils [req-b7b6c990-c6d7-4a4a-9d14-bc127a876ce2 req-fd13d55c-a6dd-4c60-ba61-f7b4ced7ab6d service nova] Lock "70e5674d-4627-4720-9b87-955c2749e010-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.478158] env[69994]: DEBUG oslo_concurrency.lockutils [req-b7b6c990-c6d7-4a4a-9d14-bc127a876ce2 req-fd13d55c-a6dd-4c60-ba61-f7b4ced7ab6d service nova] Lock "70e5674d-4627-4720-9b87-955c2749e010-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.478158] env[69994]: DEBUG nova.compute.manager [req-b7b6c990-c6d7-4a4a-9d14-bc127a876ce2 req-fd13d55c-a6dd-4c60-ba61-f7b4ced7ab6d service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] No waiting events found dispatching network-vif-plugged-02be7506-27b5-4ccf-93a0-19b365247a08 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 683.478158] env[69994]: WARNING nova.compute.manager [req-b7b6c990-c6d7-4a4a-9d14-bc127a876ce2 req-fd13d55c-a6dd-4c60-ba61-f7b4ced7ab6d service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Received unexpected event network-vif-plugged-02be7506-27b5-4ccf-93a0-19b365247a08 for instance with vm_state building and task_state spawning. 
[ 683.522132] env[69994]: DEBUG nova.network.neutron [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Successfully updated port: 02be7506-27b5-4ccf-93a0-19b365247a08 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 683.557813] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925103, 'name': CloneVM_Task, 'duration_secs': 1.626963} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.558082] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Created linked-clone VM from snapshot [ 683.558879] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62891f7-3fd9-46c5-bcf7-c09b1a6f55f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.570108] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Uploading image 21c0bf30-a996-4fb9-b00b-256911bb3baf {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 683.587501] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 683.587855] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5ea25d99-1172-4e3f-92bf-fb12e8dfd638 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.596213] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 683.596213] env[69994]: value = "task-2925110" [ 683.596213] env[69994]: _type = "Task" [ 683.596213] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.612640] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925110, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.647220] env[69994]: DEBUG nova.compute.manager [req-8da16560-51bd-4f59-ac92-8845d26d5e3b req-978fd990-16e4-4e4c-872c-4e4625783968 service nova] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Received event network-changed-35726e36-0b23-4204-b28b-90bc44467363 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 683.647220] env[69994]: DEBUG nova.compute.manager [req-8da16560-51bd-4f59-ac92-8845d26d5e3b req-978fd990-16e4-4e4c-872c-4e4625783968 service nova] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Refreshing instance network info cache due to event network-changed-35726e36-0b23-4204-b28b-90bc44467363. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 683.647220] env[69994]: DEBUG oslo_concurrency.lockutils [req-8da16560-51bd-4f59-ac92-8845d26d5e3b req-978fd990-16e4-4e4c-872c-4e4625783968 service nova] Acquiring lock "refresh_cache-6aacfc4e-32b4-40d7-8240-e4449cf78925" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.647220] env[69994]: DEBUG oslo_concurrency.lockutils [req-8da16560-51bd-4f59-ac92-8845d26d5e3b req-978fd990-16e4-4e4c-872c-4e4625783968 service nova] Acquired lock "refresh_cache-6aacfc4e-32b4-40d7-8240-e4449cf78925" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.647220] env[69994]: DEBUG nova.network.neutron [req-8da16560-51bd-4f59-ac92-8845d26d5e3b req-978fd990-16e4-4e4c-872c-4e4625783968 service nova] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Refreshing network info cache for port 35726e36-0b23-4204-b28b-90bc44467363 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 683.722765] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925109, 'name': CreateVM_Task, 'duration_secs': 0.466161} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.723155] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 683.723832] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.724039] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.724409] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 683.725105] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdfdcad6-e915-41dc-82f9-687523a58e78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.729585] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Acquiring lock "daa2e7c3-b473-4848-9885-9923d8aea5ad" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.729585] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Lock "daa2e7c3-b473-4848-9885-9923d8aea5ad" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.743156] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 683.743156] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b66bac-0e13-50e4-b663-0c3766c86576" [ 683.743156] env[69994]: _type = "Task" [ 683.743156] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.743156] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Lock "daa2e7c3-b473-4848-9885-9923d8aea5ad" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.013s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.743156] env[69994]: DEBUG nova.compute.manager [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 683.760313] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b66bac-0e13-50e4-b663-0c3766c86576, 'name': SearchDatastore_Task, 'duration_secs': 0.015857} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.760653] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.760934] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 683.761217] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.761367] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.761540] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 683.761901] env[69994]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ed739de-06b3-4f17-b228-24695bb84ce0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.774362] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 683.774517] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 683.776178] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4512dbc-2e6e-4a6c-9930-d749d4dd608a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.784703] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 683.784703] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524221d3-6460-dbd1-79ab-92907bee3a4e" [ 683.784703] env[69994]: _type = "Task" [ 683.784703] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.801141] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524221d3-6460-dbd1-79ab-92907bee3a4e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.026781] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Acquiring lock "refresh_cache-70e5674d-4627-4720-9b87-955c2749e010" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.026997] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Acquired lock "refresh_cache-70e5674d-4627-4720-9b87-955c2749e010" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.027388] env[69994]: DEBUG nova.network.neutron [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 684.111222] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925110, 'name': Destroy_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.246330] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4818a2b-6eb1-4742-b525-aeaf54adecce tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.247746] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.687s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.251039] env[69994]: DEBUG nova.objects.instance [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Lazy-loading 'resources' on Instance uuid 7ea91d3b-1e43-45cd-9bff-e144c63177c8 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 684.253140] env[69994]: DEBUG nova.compute.utils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 684.256544] env[69994]: DEBUG nova.compute.manager [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 684.256726] env[69994]: DEBUG nova.network.neutron [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 684.277041] env[69994]: DEBUG nova.network.neutron [-] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.297292] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524221d3-6460-dbd1-79ab-92907bee3a4e, 'name': SearchDatastore_Task, 'duration_secs': 0.016649} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.301475] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6f76a88-b520-4a40-b3a6-6d7b9571f561 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.311336] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 684.311336] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e66ed3-1e18-2c2c-e854-053bf56cff22" [ 684.311336] env[69994]: _type = "Task" [ 684.311336] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.325206] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e66ed3-1e18-2c2c-e854-053bf56cff22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.347775] env[69994]: DEBUG nova.policy [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41f644c883c4436d987b79290aead911', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6a3cec211b648f0b0298983e3c0c7a2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 684.460223] env[69994]: DEBUG nova.network.neutron [req-8da16560-51bd-4f59-ac92-8845d26d5e3b req-978fd990-16e4-4e4c-872c-4e4625783968 service nova] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Updated VIF entry in instance network info cache for port 35726e36-0b23-4204-b28b-90bc44467363. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 684.460644] env[69994]: DEBUG nova.network.neutron [req-8da16560-51bd-4f59-ac92-8845d26d5e3b req-978fd990-16e4-4e4c-872c-4e4625783968 service nova] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Updating instance_info_cache with network_info: [{"id": "35726e36-0b23-4204-b28b-90bc44467363", "address": "fa:16:3e:d4:70:2c", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35726e36-0b", "ovs_interfaceid": "35726e36-0b23-4204-b28b-90bc44467363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.586777] env[69994]: DEBUG nova.network.neutron [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.609108] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925110, 'name': Destroy_Task, 'duration_secs': 0.633563} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.609589] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Destroyed the VM [ 684.610072] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 684.610233] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1723daaa-bfb8-4f40-a5c0-fa44c575a48c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.621360] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 684.621360] env[69994]: value = "task-2925115" [ 684.621360] env[69994]: _type = "Task" [ 684.621360] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.632269] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925115, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.761727] env[69994]: DEBUG nova.compute.manager [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 684.779270] env[69994]: DEBUG nova.network.neutron [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Updating instance_info_cache with network_info: [{"id": "02be7506-27b5-4ccf-93a0-19b365247a08", "address": "fa:16:3e:ca:79:3f", "network": {"id": "b2fb76d9-abd4-4e51-940d-d357d2ff0e9a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2007639793-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29874baa31194323bf3566aa52711e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02be7506-27", "ovs_interfaceid": "02be7506-27b5-4ccf-93a0-19b365247a08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.782062] env[69994]: INFO nova.compute.manager [-] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Took 1.61 seconds to deallocate network for instance. [ 684.831384] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e66ed3-1e18-2c2c-e854-053bf56cff22, 'name': SearchDatastore_Task, 'duration_secs': 0.014602} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.831966] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.832238] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 6aacfc4e-32b4-40d7-8240-e4449cf78925/6aacfc4e-32b4-40d7-8240-e4449cf78925.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 684.833243] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f47c9273-9a1e-498b-94be-8e2e9fd66864 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.845351] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 684.845351] env[69994]: value = "task-2925116" [ 684.845351] env[69994]: _type = "Task" [ 684.845351] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.866424] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925116, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.882040] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.882293] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.938638] env[69994]: DEBUG nova.network.neutron [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Successfully created port: 85de3239-5385-4cfe-ac05-cfb286970c9e {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 684.969475] env[69994]: DEBUG oslo_concurrency.lockutils [req-8da16560-51bd-4f59-ac92-8845d26d5e3b req-978fd990-16e4-4e4c-872c-4e4625783968 service nova] Releasing lock "refresh_cache-6aacfc4e-32b4-40d7-8240-e4449cf78925" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.140334] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925115, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.282733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Releasing lock "refresh_cache-70e5674d-4627-4720-9b87-955c2749e010" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.283266] env[69994]: DEBUG nova.compute.manager [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Instance network_info: |[{"id": "02be7506-27b5-4ccf-93a0-19b365247a08", "address": "fa:16:3e:ca:79:3f", "network": {"id": "b2fb76d9-abd4-4e51-940d-d357d2ff0e9a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2007639793-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29874baa31194323bf3566aa52711e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02be7506-27", "ovs_interfaceid": "02be7506-27b5-4ccf-93a0-19b365247a08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 685.283782] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:79:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccf76700-491b-4462-ab19-e6d3a9ff87ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02be7506-27b5-4ccf-93a0-19b365247a08', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.293628] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Creating folder: Project (29874baa31194323bf3566aa52711e4c). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.294888] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.295266] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0805ff05-f066-4602-8579-b3398d704cfa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.313277] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Created folder: Project (29874baa31194323bf3566aa52711e4c) in parent group-v587342. [ 685.313277] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Creating folder: Instances. Parent ref: group-v587404. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.313277] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3113086b-1af5-433a-8c5d-b85bda2d6ec5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.322824] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Created folder: Instances in parent group-v587404. [ 685.323084] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 685.323313] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 685.323527] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c8ccf99-f38b-4dc1-bed4-66aa3f54c314 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.363223] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925116, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.364909] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.364909] env[69994]: value = "task-2925119" [ 685.364909] env[69994]: _type = "Task" [ 685.364909] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.378518] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4500898d-d57c-4286-b1db-a191347a806a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.385684] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2c6fe1-37b6-4102-9c6a-ab86cc5cd710 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.424024] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f89d58-4ee2-4648-9794-c24ece07aa6c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.433396] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3af7e2-5262-48a0-9b64-e89314f98b2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.450406] env[69994]: DEBUG nova.compute.provider_tree [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.521278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "e0764e41-0810-45a1-8917-ac901f0f8321" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.521657] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "e0764e41-0810-45a1-8917-ac901f0f8321" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.634399] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925115, 'name': RemoveSnapshot_Task} progress is 76%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.775147] env[69994]: DEBUG nova.compute.manager [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 685.780140] env[69994]: DEBUG nova.compute.manager [req-5a43de8c-d641-42ac-a13b-ad2eb3670f12 req-15a426b7-f270-4e9c-8d3c-cd0efd0a68c7 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Received event network-changed-02be7506-27b5-4ccf-93a0-19b365247a08 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 685.780820] env[69994]: DEBUG nova.compute.manager [req-5a43de8c-d641-42ac-a13b-ad2eb3670f12 req-15a426b7-f270-4e9c-8d3c-cd0efd0a68c7 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Refreshing instance network info cache due to event network-changed-02be7506-27b5-4ccf-93a0-19b365247a08. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 685.781078] env[69994]: DEBUG oslo_concurrency.lockutils [req-5a43de8c-d641-42ac-a13b-ad2eb3670f12 req-15a426b7-f270-4e9c-8d3c-cd0efd0a68c7 service nova] Acquiring lock "refresh_cache-70e5674d-4627-4720-9b87-955c2749e010" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.782020] env[69994]: DEBUG oslo_concurrency.lockutils [req-5a43de8c-d641-42ac-a13b-ad2eb3670f12 req-15a426b7-f270-4e9c-8d3c-cd0efd0a68c7 service nova] Acquired lock "refresh_cache-70e5674d-4627-4720-9b87-955c2749e010" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.782020] env[69994]: DEBUG nova.network.neutron [req-5a43de8c-d641-42ac-a13b-ad2eb3670f12 req-15a426b7-f270-4e9c-8d3c-cd0efd0a68c7 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Refreshing network info cache for port 02be7506-27b5-4ccf-93a0-19b365247a08 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.803203] env[69994]: DEBUG nova.virt.hardware [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 685.803456] env[69994]: DEBUG nova.virt.hardware [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 685.803611] env[69994]: DEBUG nova.virt.hardware [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 685.803976] env[69994]: DEBUG 
nova.virt.hardware [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 685.804131] env[69994]: DEBUG nova.virt.hardware [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 685.804296] env[69994]: DEBUG nova.virt.hardware [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 685.804512] env[69994]: DEBUG nova.virt.hardware [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 685.804668] env[69994]: DEBUG nova.virt.hardware [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 685.804835] env[69994]: DEBUG nova.virt.hardware [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 685.804997] env[69994]: DEBUG nova.virt.hardware [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 685.805183] env[69994]: DEBUG nova.virt.hardware [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 685.806079] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57d03bb-85a6-4ece-b7b4-6fc1fc25033e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.817020] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d09806c2-646b-4e89-9f92-16d7da8d0679 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.863662] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925116, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536267} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.863992] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 6aacfc4e-32b4-40d7-8240-e4449cf78925/6aacfc4e-32b4-40d7-8240-e4449cf78925.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 685.864225] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 685.864512] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd1d91f8-77a4-42a2-97ef-2cc64ecf7255 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.874674] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925119, 'name': CreateVM_Task, 'duration_secs': 0.357509} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.876431] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 685.876768] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 685.876768] env[69994]: value = "task-2925120" [ 685.876768] env[69994]: _type = "Task" [ 685.876768] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.877542] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.877707] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.878043] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 685.878360] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fddd435-5ff5-4ace-8ab3-015f34750f5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.887745] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Waiting for the task: (returnval){ [ 685.887745] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f60194-8370-3687-fe70-7faf6e8c28d6" [ 685.887745] env[69994]: _type = "Task" [ 685.887745] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.891137] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925120, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.898546] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f60194-8370-3687-fe70-7faf6e8c28d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.901970] env[69994]: DEBUG nova.compute.manager [req-7efee4aa-9621-44cc-a81a-0e17312b5ebf req-613d1fb4-8fed-4a72-b001-12bfd495d895 service nova] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Received event network-vif-deleted-c26583c0-ad13-4a74-adc9-deb6e535a3f0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 685.956884] env[69994]: DEBUG nova.scheduler.client.report [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 686.135431] env[69994]: DEBUG oslo_vmware.api [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925115, 'name': RemoveSnapshot_Task, 'duration_secs': 1.228889} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.135754] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 686.197231] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "180b4236-289c-4818-885d-c66e9e9a2ea8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.197477] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "180b4236-289c-4818-885d-c66e9e9a2ea8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.389383] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925120, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116498} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.389738] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 686.390782] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ea5c0b-730e-49a1-8a49-da4c5cb6ea69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.422794] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f60194-8370-3687-fe70-7faf6e8c28d6, 'name': SearchDatastore_Task, 'duration_secs': 0.012084} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.432755] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] 6aacfc4e-32b4-40d7-8240-e4449cf78925/6aacfc4e-32b4-40d7-8240-e4449cf78925.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 686.437045] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.437348] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 686.437611] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.437770] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.438094] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 686.438268] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebcd52f4-e269-43cd-b083-f0020ab37b17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.454070] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c61fa812-74ae-4cb1-a588-f309ecf259c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.462039] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.214s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.466913] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.261s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.471150] env[69994]: INFO nova.compute.claims [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.471434] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 686.471721] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 686.472671] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 686.472671] env[69994]: value = "task-2925121" [ 686.472671] env[69994]: _type = "Task" [ 686.472671] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.474111] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3887ebb1-e0a5-44ec-9d53-dcc90b2788a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.486303] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925121, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.489872] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Waiting for the task: (returnval){ [ 686.489872] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b4ca49-1208-bb85-ef95-a5c8e56f82d7" [ 686.489872] env[69994]: _type = "Task" [ 686.489872] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.497506] env[69994]: INFO nova.scheduler.client.report [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Deleted allocations for instance 7ea91d3b-1e43-45cd-9bff-e144c63177c8 [ 686.501544] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b4ca49-1208-bb85-ef95-a5c8e56f82d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.613447] env[69994]: DEBUG nova.network.neutron [req-5a43de8c-d641-42ac-a13b-ad2eb3670f12 req-15a426b7-f270-4e9c-8d3c-cd0efd0a68c7 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Updated VIF entry in instance network info cache for port 02be7506-27b5-4ccf-93a0-19b365247a08. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 686.613876] env[69994]: DEBUG nova.network.neutron [req-5a43de8c-d641-42ac-a13b-ad2eb3670f12 req-15a426b7-f270-4e9c-8d3c-cd0efd0a68c7 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Updating instance_info_cache with network_info: [{"id": "02be7506-27b5-4ccf-93a0-19b365247a08", "address": "fa:16:3e:ca:79:3f", "network": {"id": "b2fb76d9-abd4-4e51-940d-d357d2ff0e9a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2007639793-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29874baa31194323bf3566aa52711e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02be7506-27", "ovs_interfaceid": "02be7506-27b5-4ccf-93a0-19b365247a08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.642807] env[69994]: WARNING nova.compute.manager [None req-633e4dd7-5bc4-4e36-b88e-77c060491ce6 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Image not found during snapshot: nova.exception.ImageNotFound: Image 21c0bf30-a996-4fb9-b00b-256911bb3baf could not be found. [ 686.991799] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925121, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.002874] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b4ca49-1208-bb85-ef95-a5c8e56f82d7, 'name': SearchDatastore_Task, 'duration_secs': 0.017775} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.003834] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a95eae3-112f-46b9-b45c-9a28af03fc3c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.012105] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Waiting for the task: (returnval){ [ 687.012105] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522bb05a-6a95-8ba5-e4c5-048458d1f07d" [ 687.012105] env[69994]: _type = "Task" [ 687.012105] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.012658] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbcd99d8-d22b-40d8-86ca-a276c1c9be56 tempest-ServerExternalEventsTest-914976596 tempest-ServerExternalEventsTest-914976596-project-member] Lock "7ea91d3b-1e43-45cd-9bff-e144c63177c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.673s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.024490] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522bb05a-6a95-8ba5-e4c5-048458d1f07d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.075763] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "803e9885-000f-4696-9fb9-03361ef46538" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.075763] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "803e9885-000f-4696-9fb9-03361ef46538" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.075763] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "803e9885-000f-4696-9fb9-03361ef46538-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.075763] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock 
"803e9885-000f-4696-9fb9-03361ef46538-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.075970] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "803e9885-000f-4696-9fb9-03361ef46538-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.076838] env[69994]: INFO nova.compute.manager [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Terminating instance [ 687.117614] env[69994]: DEBUG oslo_concurrency.lockutils [req-5a43de8c-d641-42ac-a13b-ad2eb3670f12 req-15a426b7-f270-4e9c-8d3c-cd0efd0a68c7 service nova] Releasing lock "refresh_cache-70e5674d-4627-4720-9b87-955c2749e010" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.254129] env[69994]: DEBUG nova.network.neutron [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Successfully updated port: 85de3239-5385-4cfe-ac05-cfb286970c9e {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 687.492110] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925121, 'name': ReconfigVM_Task, 'duration_secs': 0.565903} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.492418] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Reconfigured VM instance instance-00000012 to attach disk [datastore2] 6aacfc4e-32b4-40d7-8240-e4449cf78925/6aacfc4e-32b4-40d7-8240-e4449cf78925.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 687.493077] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6eb047d1-314d-44a4-b8a1-632e1d2cda9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.500754] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 687.500754] env[69994]: value = "task-2925123" [ 687.500754] env[69994]: _type = "Task" [ 687.500754] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.512135] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925123, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.528627] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522bb05a-6a95-8ba5-e4c5-048458d1f07d, 'name': SearchDatastore_Task, 'duration_secs': 0.015608} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.530578] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.530874] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 70e5674d-4627-4720-9b87-955c2749e010/70e5674d-4627-4720-9b87-955c2749e010.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 687.531182] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2419a558-657c-436a-851f-4eb969919beb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.543041] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Waiting for the task: (returnval){ [ 687.543041] env[69994]: value = "task-2925124" [ 687.543041] env[69994]: _type = "Task" [ 687.543041] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.558624] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925124, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.585097] env[69994]: DEBUG nova.compute.manager [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 687.585346] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 687.586299] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ad5de3-fba4-4be4-bed5-d8d424d27e04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.599074] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 687.599367] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dfb4228d-6ea3-41d0-bb37-cb40b62ef7d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.616034] env[69994]: DEBUG oslo_vmware.api [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 687.616034] env[69994]: value = "task-2925125" [ 687.616034] env[69994]: _type = "Task" [ 687.616034] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.632772] env[69994]: DEBUG oslo_vmware.api [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925125, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.763132] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Acquiring lock "refresh_cache-55dd32b0-e67f-4943-86e8-b9956267fedc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.763339] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Acquired lock "refresh_cache-55dd32b0-e67f-4943-86e8-b9956267fedc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.763339] env[69994]: DEBUG nova.network.neutron [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 687.814489] env[69994]: DEBUG nova.compute.manager [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Received event network-vif-plugged-85de3239-5385-4cfe-ac05-cfb286970c9e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 687.814489] env[69994]: DEBUG oslo_concurrency.lockutils [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] Acquiring lock "55dd32b0-e67f-4943-86e8-b9956267fedc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.814489] env[69994]: DEBUG oslo_concurrency.lockutils [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] Lock "55dd32b0-e67f-4943-86e8-b9956267fedc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.814489] env[69994]: DEBUG oslo_concurrency.lockutils [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] Lock "55dd32b0-e67f-4943-86e8-b9956267fedc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.814489] env[69994]: DEBUG nova.compute.manager [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] No waiting events found dispatching network-vif-plugged-85de3239-5385-4cfe-ac05-cfb286970c9e {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 687.814724] env[69994]: WARNING nova.compute.manager [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Received unexpected event network-vif-plugged-85de3239-5385-4cfe-ac05-cfb286970c9e for instance with vm_state building and task_state spawning. 
[ 687.814724] env[69994]: DEBUG nova.compute.manager [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Received event network-changed-85de3239-5385-4cfe-ac05-cfb286970c9e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 687.814724] env[69994]: DEBUG nova.compute.manager [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Refreshing instance network info cache due to event network-changed-85de3239-5385-4cfe-ac05-cfb286970c9e. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 687.814724] env[69994]: DEBUG oslo_concurrency.lockutils [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] Acquiring lock "refresh_cache-55dd32b0-e67f-4943-86e8-b9956267fedc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.012071] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925123, 'name': Rename_Task, 'duration_secs': 0.178526} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.012756] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 688.013436] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d31b952-3c7a-4848-a000-3d1db37bd0aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.027585] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 688.027585] env[69994]: value = "task-2925126" [ 688.027585] env[69994]: _type = "Task" [ 688.027585] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.045726] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925126, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.056539] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925124, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.106098] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e69e3d6-ea88-406a-958b-32d404490545 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.114310] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb579410-5ba3-4e93-bfe7-a47d9eb2a326 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.153934] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900d0f62-33a5-45eb-a323-cf3120e14e14 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.157724] env[69994]: DEBUG oslo_vmware.api [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925125, 'name': PowerOffVM_Task, 'duration_secs': 0.217717} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.158014] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 688.158204] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 688.158925] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff8c00bc-1fd6-4948-a8ee-78b8f1f9fd58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.164241] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20a546a-39cc-4d3b-ac41-fe409ff842de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.181326] env[69994]: DEBUG nova.compute.provider_tree [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 688.227962] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 688.228259] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 
tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 688.228444] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Deleting the datastore file [datastore2] 803e9885-000f-4696-9fb9-03361ef46538 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 688.228712] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0984c813-3016-4cdf-b082-ce466f719b31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.235410] env[69994]: DEBUG oslo_vmware.api [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 688.235410] env[69994]: value = "task-2925128" [ 688.235410] env[69994]: _type = "Task" [ 688.235410] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.243919] env[69994]: DEBUG oslo_vmware.api [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925128, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.318277] env[69994]: DEBUG nova.network.neutron [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.539711] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925126, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.540799] env[69994]: DEBUG nova.network.neutron [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Updating instance_info_cache with network_info: [{"id": "85de3239-5385-4cfe-ac05-cfb286970c9e", "address": "fa:16:3e:27:b0:cb", "network": {"id": "4062f8e7-aa89-4561-80d8-21e235919451", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1062315607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6a3cec211b648f0b0298983e3c0c7a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85de3239-53", "ovs_interfaceid": "85de3239-5385-4cfe-ac05-cfb286970c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.555403] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925124, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528944} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.556303] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 70e5674d-4627-4720-9b87-955c2749e010/70e5674d-4627-4720-9b87-955c2749e010.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 688.556554] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 688.556813] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84891a24-ca22-434f-891f-00a874ddbb95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.566480] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Waiting for the task: (returnval){ [ 688.566480] env[69994]: value = "task-2925129" [ 688.566480] env[69994]: _type = "Task" [ 688.566480] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.578802] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925129, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.684790] env[69994]: DEBUG nova.scheduler.client.report [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 688.750835] env[69994]: DEBUG oslo_vmware.api [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925128, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.273161} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.750835] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 688.750835] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 688.750835] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 688.750835] env[69994]: INFO nova.compute.manager [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Took 1.16 seconds to destroy the instance on the hypervisor. [ 688.751124] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 688.751124] env[69994]: DEBUG nova.compute.manager [-] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 688.751124] env[69994]: DEBUG nova.network.neutron [-] [instance: 803e9885-000f-4696-9fb9-03361ef46538] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 689.041575] env[69994]: DEBUG oslo_vmware.api [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925126, 'name': PowerOnVM_Task, 'duration_secs': 0.599605} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.041962] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 689.042231] env[69994]: INFO nova.compute.manager [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Took 9.19 seconds to spawn the instance on the hypervisor. 
[ 689.042451] env[69994]: DEBUG nova.compute.manager [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 689.043582] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef510354-a190-40fb-a28a-2411edb7eb4b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.047095] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Releasing lock "refresh_cache-55dd32b0-e67f-4943-86e8-b9956267fedc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.047460] env[69994]: DEBUG nova.compute.manager [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Instance network_info: |[{"id": "85de3239-5385-4cfe-ac05-cfb286970c9e", "address": "fa:16:3e:27:b0:cb", "network": {"id": "4062f8e7-aa89-4561-80d8-21e235919451", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1062315607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6a3cec211b648f0b0298983e3c0c7a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85de3239-53", "ovs_interfaceid": "85de3239-5385-4cfe-ac05-cfb286970c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 689.047739] env[69994]: DEBUG oslo_concurrency.lockutils [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] Acquired lock "refresh_cache-55dd32b0-e67f-4943-86e8-b9956267fedc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.048704] env[69994]: DEBUG nova.network.neutron [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Refreshing network info cache for port 85de3239-5385-4cfe-ac05-cfb286970c9e {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 689.049167] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:b0:cb', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '365ac5b1-6d83-4dfe-887f-60574d7f6124', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85de3239-5385-4cfe-ac05-cfb286970c9e', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 689.057941] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Creating folder: Project (e6a3cec211b648f0b0298983e3c0c7a2). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.061561] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aba659ea-efef-4430-bd54-fc2677e1ce7c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.077741] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925129, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090884} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.080091] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 689.080501] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Created folder: Project (e6a3cec211b648f0b0298983e3c0c7a2) in parent group-v587342. [ 689.081166] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Creating folder: Instances. Parent ref: group-v587407. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.081552] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3416e24-7a03-44dd-b620-8bd0c15e5f6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.084526] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33689e0b-786c-4386-b740-6dc640faee52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.107679] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] 70e5674d-4627-4720-9b87-955c2749e010/70e5674d-4627-4720-9b87-955c2749e010.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 689.109794] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f22ca040-d01b-4e9c-95e2-9c493bd33775 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.124935] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Created folder: Instances in parent group-v587407. [ 689.126110] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 689.128570] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 689.131350] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-605f0800-4cc5-485f-851a-d80a079395d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.149118] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Waiting for the task: (returnval){ [ 689.149118] env[69994]: value = "task-2925133" [ 689.149118] env[69994]: _type = "Task" [ 689.149118] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.155058] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.155058] env[69994]: value = "task-2925134" [ 689.155058] env[69994]: _type = "Task" [ 689.155058] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.158871] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925133, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.170279] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925134, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.190035] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.190558] env[69994]: DEBUG nova.compute.manager [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 689.193454] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.750s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.193686] env[69994]: DEBUG nova.objects.instance [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Lazy-loading 'resources' on Instance uuid 48f6ebca-d7fe-4086-80f4-0b89789dcddb {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 689.431264] env[69994]: DEBUG nova.network.neutron [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Updated VIF entry in instance network info cache for port 85de3239-5385-4cfe-ac05-cfb286970c9e. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 689.431675] env[69994]: DEBUG nova.network.neutron [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Updating instance_info_cache with network_info: [{"id": "85de3239-5385-4cfe-ac05-cfb286970c9e", "address": "fa:16:3e:27:b0:cb", "network": {"id": "4062f8e7-aa89-4561-80d8-21e235919451", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1062315607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6a3cec211b648f0b0298983e3c0c7a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85de3239-53", "ovs_interfaceid": "85de3239-5385-4cfe-ac05-cfb286970c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.576888] env[69994]: INFO nova.compute.manager [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Took 40.70 seconds to build instance. [ 689.669649] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925133, 'name': ReconfigVM_Task, 'duration_secs': 0.49391} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.673177] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Reconfigured VM instance instance-00000013 to attach disk [datastore2] 70e5674d-4627-4720-9b87-955c2749e010/70e5674d-4627-4720-9b87-955c2749e010.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 689.673826] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925134, 'name': CreateVM_Task, 'duration_secs': 0.448267} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.674059] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94149cdf-52d8-40b8-a97d-a3226864ef99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.675952] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 689.676632] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.676793] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.677171] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 689.677768] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e466e5c-0814-4d3c-85d2-0e269e233902 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.682342] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Waiting for the task: (returnval){ [ 689.682342] env[69994]: value = "task-2925135" [ 689.682342] env[69994]: _type = "Task" [ 689.682342] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.683684] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Waiting for the task: (returnval){ [ 689.683684] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ab91f4-df7e-3a03-7290-0e87df08f2d3" [ 689.683684] env[69994]: _type = "Task" [ 689.683684] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.697528] env[69994]: DEBUG nova.compute.utils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 689.701639] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925135, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.710061] env[69994]: DEBUG nova.compute.manager [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 689.710061] env[69994]: DEBUG nova.network.neutron [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 689.710061] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ab91f4-df7e-3a03-7290-0e87df08f2d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.716418] env[69994]: DEBUG nova.network.neutron [-] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.782139] env[69994]: DEBUG nova.policy [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20f72abb49c74d728377415b6ceb9bf9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3be3c16688fa4225a445a75e40e0f6a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 689.885611] env[69994]: DEBUG nova.compute.manager [req-bdccc124-5f23-4dc4-9f71-d28f17ea272b req-ce9251b8-3d1c-4b88-b037-696e26bd89b3 service nova] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Received event network-vif-deleted-c2407183-ab55-4108-a4c9-1fb48e727a35 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 689.934447] env[69994]: DEBUG oslo_concurrency.lockutils [req-ac41c154-3f33-4c08-9ef9-9c67cb03d538 req-09bd1643-6b79-4730-bedc-fa2cd3e8415a service nova] Releasing lock "refresh_cache-55dd32b0-e67f-4943-86e8-b9956267fedc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.079401] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c80de12f-d45d-4ffe-8278-6400982e2eca tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "6aacfc4e-32b4-40d7-8240-e4449cf78925" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.543s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.199269] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925135, 'name': Rename_Task, 'duration_secs': 0.217551} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.199269] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 690.199269] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd57205a-5530-4ad8-b98e-73fcc6d373cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.206106] env[69994]: DEBUG nova.compute.manager [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 690.208590] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ab91f4-df7e-3a03-7290-0e87df08f2d3, 'name': SearchDatastore_Task, 'duration_secs': 0.020903} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.211848] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.212144] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 690.212568] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.212756] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.213016] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 690.214471] env[69994]: DEBUG nova.network.neutron [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Successfully created port: 4513bec6-f476-4ac6-91cf-67ff8b19e2cb {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 690.216547] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46edfd75-72e7-4ff8-b927-0517ed2a4b32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.219759] env[69994]: INFO nova.compute.manager [-] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Took 1.47 seconds to deallocate network for instance. [ 690.220364] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Waiting for the task: (returnval){ [ 690.220364] env[69994]: value = "task-2925136" [ 690.220364] env[69994]: _type = "Task" [ 690.220364] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.230956] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f593fafd-1155-4800-b3c2-981704e1c85d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.238620] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925136, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.238988] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 690.239210] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 690.240327] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca83b6a8-2d20-4c72-ad23-1707e47980c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.248119] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf80a8cb-14c4-41ea-986b-4def361bcdac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.253345] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Waiting for the task: (returnval){ [ 690.253345] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5213d6fc-e890-c83f-a4b2-29e300182f8d" [ 690.253345] env[69994]: _type = "Task" [ 690.253345] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.284818] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349f5f48-982f-462e-b27f-4a29607e9a44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.291534] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5213d6fc-e890-c83f-a4b2-29e300182f8d, 'name': SearchDatastore_Task, 'duration_secs': 0.015631} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.292700] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ceedbab2-d03b-4944-9bd8-ef38bb2c35fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.298524] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42124cfb-6804-4313-8de0-5579b81e60fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.304337] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Waiting for the task: (returnval){ [ 690.304337] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a61b10-9ff0-c0f0-21f3-f7a02a7ed5b4" [ 690.304337] env[69994]: _type = "Task" [ 690.304337] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.317992] env[69994]: DEBUG nova.compute.provider_tree [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.325491] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a61b10-9ff0-c0f0-21f3-f7a02a7ed5b4, 'name': SearchDatastore_Task, 'duration_secs': 0.016991} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.325805] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.326183] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 55dd32b0-e67f-4943-86e8-b9956267fedc/55dd32b0-e67f-4943-86e8-b9956267fedc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 690.326472] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab40afd7-bb0f-4545-b758-e8c5620eed3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.333743] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Waiting for the task: (returnval){ [ 690.333743] env[69994]: value = "task-2925137" [ 690.333743] env[69994]: _type = "Task" [ 690.333743] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.344772] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925137, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.587843] env[69994]: DEBUG nova.compute.manager [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 690.731782] env[69994]: DEBUG oslo_vmware.api [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925136, 'name': PowerOnVM_Task, 'duration_secs': 0.454539} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.732079] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 690.732282] env[69994]: INFO nova.compute.manager [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Took 8.16 seconds to spawn the instance on the hypervisor. [ 690.732455] env[69994]: DEBUG nova.compute.manager [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 690.733327] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f8ad55-03f0-4000-8015-b04a31590f8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.736713] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.821597] env[69994]: DEBUG nova.scheduler.client.report [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 690.846902] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925137, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.103039] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "6fb97a65-bf0b-4e79-9611-f0f3179661b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.103307] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "6fb97a65-bf0b-4e79-9611-f0f3179661b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.123905] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.223751] env[69994]: DEBUG nova.compute.manager [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 691.254424] env[69994]: INFO nova.compute.manager [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Took 37.64 seconds to build instance. 
[ 691.261298] env[69994]: DEBUG nova.virt.hardware [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 691.261585] env[69994]: DEBUG nova.virt.hardware [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 691.261767] env[69994]: DEBUG nova.virt.hardware [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 691.262584] env[69994]: DEBUG nova.virt.hardware [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 691.262584] env[69994]: DEBUG nova.virt.hardware [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 691.262584] env[69994]: DEBUG nova.virt.hardware [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 691.262584] env[69994]: DEBUG nova.virt.hardware [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 691.262846] env[69994]: DEBUG nova.virt.hardware [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 691.263260] env[69994]: DEBUG nova.virt.hardware [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 
tempest-ServerAddressesTestJSON-10777695-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 691.263624] env[69994]: DEBUG nova.virt.hardware [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 691.264225] env[69994]: DEBUG nova.virt.hardware [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 691.266484] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18cdb36b-de73-4c27-aa30-097ea46427c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.281613] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807715f9-0918-4df6-9031-35bfded83600 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.335437] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.142s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.343956] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.704s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.346580] env[69994]: INFO nova.compute.claims [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 691.361135] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925137, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659978} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.362372] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 55dd32b0-e67f-4943-86e8-b9956267fedc/55dd32b0-e67f-4943-86e8-b9956267fedc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 691.362706] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 691.363206] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a715cb2d-7c4c-41c1-893c-eb3b5ef68b74 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.370776] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Waiting for the task: (returnval){ [ 691.370776] env[69994]: value = "task-2925139" [ 691.370776] env[69994]: _type = "Task" [ 691.370776] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.383106] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925139, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.387919] env[69994]: INFO nova.scheduler.client.report [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Deleted allocations for instance 48f6ebca-d7fe-4086-80f4-0b89789dcddb [ 691.449907] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52162278-8bf8-5570-3341-55822d951beb/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 691.451059] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c027a6-a3fb-4494-bdf3-657ba03196b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.462716] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52162278-8bf8-5570-3341-55822d951beb/disk-0.vmdk is in state: ready. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 691.462840] env[69994]: ERROR oslo_vmware.rw_handles [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52162278-8bf8-5570-3341-55822d951beb/disk-0.vmdk due to incomplete transfer. [ 691.463073] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-711165b0-8e59-445f-883c-0402db2d0a9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.470249] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52162278-8bf8-5570-3341-55822d951beb/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 691.470504] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Uploaded image 3b72b40e-7308-45f8-b5ff-72de11dda04c to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 691.473174] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 691.474120] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-08d0119a-1b7d-4bc8-825b-06883e5ddbb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.480379] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 691.480379] env[69994]: value = "task-2925140" [ 691.480379] env[69994]: _type = "Task" [ 691.480379] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.488928] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925140, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.757725] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6acc05f-d8f6-4d0b-a764-b76b9eac117b tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Lock "70e5674d-4627-4720-9b87-955c2749e010" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.379s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.882600] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925139, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075681} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.882874] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 691.884398] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e0bdcc-2b77-4677-948f-1d5af1705641 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.916133] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 55dd32b0-e67f-4943-86e8-b9956267fedc/55dd32b0-e67f-4943-86e8-b9956267fedc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 691.916133] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2a950d7c-92ae-46c3-840b-9da39d93e371 tempest-ServerDiagnosticsV248Test-197083 tempest-ServerDiagnosticsV248Test-197083-project-member] Lock "48f6ebca-d7fe-4086-80f4-0b89789dcddb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.783s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.916263] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-989611cf-7ce7-4b1c-8216-d5a0460043f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.940927] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Waiting for the task: (returnval){ [ 691.940927] env[69994]: value = "task-2925141" [ 691.940927] env[69994]: _type = "Task" [ 691.940927] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.953052] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925141, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.992918] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925140, 'name': Destroy_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.092964] env[69994]: DEBUG nova.network.neutron [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Successfully updated port: 4513bec6-f476-4ac6-91cf-67ff8b19e2cb {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 692.265865] env[69994]: DEBUG nova.compute.manager [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 692.361099] env[69994]: DEBUG nova.compute.manager [req-13b2c6c5-80fd-4b0a-ab8e-850095409c93 req-1e2a88c1-b25c-42a6-9ec0-5ea3e99ba193 service nova] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Received event network-vif-plugged-4513bec6-f476-4ac6-91cf-67ff8b19e2cb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 692.361318] env[69994]: DEBUG oslo_concurrency.lockutils [req-13b2c6c5-80fd-4b0a-ab8e-850095409c93 req-1e2a88c1-b25c-42a6-9ec0-5ea3e99ba193 service nova] Acquiring lock "e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.361671] env[69994]: DEBUG oslo_concurrency.lockutils [req-13b2c6c5-80fd-4b0a-ab8e-850095409c93 req-1e2a88c1-b25c-42a6-9ec0-5ea3e99ba193 service nova] Lock "e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.361671] env[69994]: DEBUG oslo_concurrency.lockutils [req-13b2c6c5-80fd-4b0a-ab8e-850095409c93 req-1e2a88c1-b25c-42a6-9ec0-5ea3e99ba193 service nova] Lock "e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.362609] env[69994]: DEBUG nova.compute.manager [req-13b2c6c5-80fd-4b0a-ab8e-850095409c93 req-1e2a88c1-b25c-42a6-9ec0-5ea3e99ba193 service nova] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] No waiting events found dispatching network-vif-plugged-4513bec6-f476-4ac6-91cf-67ff8b19e2cb {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 692.362609] env[69994]: WARNING nova.compute.manager 
[req-13b2c6c5-80fd-4b0a-ab8e-850095409c93 req-1e2a88c1-b25c-42a6-9ec0-5ea3e99ba193 service nova] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Received unexpected event network-vif-plugged-4513bec6-f476-4ac6-91cf-67ff8b19e2cb for instance with vm_state building and task_state spawning. [ 692.455237] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925141, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.493620] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925140, 'name': Destroy_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.604166] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Acquiring lock "refresh_cache-e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.604367] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Acquired lock "refresh_cache-e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.604487] env[69994]: DEBUG nova.network.neutron [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.791407] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.872330] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a4fe6d-09f4-4121-b84d-33028d9b3f32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.880698] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d24a4d-3409-480e-9eab-cbccaf77a284 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.913704] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b56990-18ca-4629-b329-a3365b83bfdd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.920902] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24083871-9ac7-4ce0-88a7-ec43a46f5c66 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.935301] env[69994]: DEBUG nova.compute.provider_tree [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.954871] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925141, 'name': ReconfigVM_Task, 'duration_secs': 0.880034} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.957026] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 55dd32b0-e67f-4943-86e8-b9956267fedc/55dd32b0-e67f-4943-86e8-b9956267fedc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 692.957026] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ecb3beb2-fc65-4054-90b2-7af0a78551d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.962128] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Waiting for the task: (returnval){ [ 692.962128] env[69994]: value = "task-2925142" [ 692.962128] env[69994]: _type = "Task" [ 692.962128] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.975326] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925142, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.992041] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925140, 'name': Destroy_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.145359] env[69994]: DEBUG nova.network.neutron [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.394746] env[69994]: DEBUG nova.network.neutron [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Updating instance_info_cache with network_info: [{"id": "4513bec6-f476-4ac6-91cf-67ff8b19e2cb", "address": "fa:16:3e:88:7f:05", "network": {"id": "0439e247-6ece-47dd-8da8-ac0b5ba3e22f", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-591031306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3be3c16688fa4225a445a75e40e0f6a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4513bec6-f4", "ovs_interfaceid": "4513bec6-f476-4ac6-91cf-67ff8b19e2cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.442143] env[69994]: DEBUG nova.scheduler.client.report [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 693.472216] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925142, 'name': Rename_Task, 'duration_secs': 0.191167} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.472507] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 693.472754] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c8375fd-65d4-4f77-a662-aaaf89d7f83f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.478871] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Waiting for the task: (returnval){ [ 693.478871] env[69994]: value = "task-2925144" [ 693.478871] env[69994]: _type = "Task" [ 693.478871] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.488724] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925144, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.498501] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925140, 'name': Destroy_Task, 'duration_secs': 1.887417} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.499142] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Destroyed the VM [ 693.499455] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 693.499963] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b2979bbe-dbe2-4dbd-855f-650a6c62025c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.507313] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 693.507313] env[69994]: value = "task-2925145" [ 693.507313] env[69994]: _type = "Task" [ 693.507313] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.516377] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925145, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.650212] env[69994]: DEBUG oslo_concurrency.lockutils [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.650615] env[69994]: DEBUG oslo_concurrency.lockutils [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.650852] env[69994]: DEBUG oslo_concurrency.lockutils [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.651060] env[69994]: DEBUG oslo_concurrency.lockutils [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.651265] env[69994]: DEBUG oslo_concurrency.lockutils [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.653952] env[69994]: INFO nova.compute.manager [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Terminating instance [ 693.728618] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "9b6aca3c-337b-4067-80e0-487d956fabc7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.729111] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "9b6aca3c-337b-4067-80e0-487d956fabc7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
693.897545] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Releasing lock "refresh_cache-e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.898415] env[69994]: DEBUG nova.compute.manager [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Instance network_info: |[{"id": "4513bec6-f476-4ac6-91cf-67ff8b19e2cb", "address": "fa:16:3e:88:7f:05", "network": {"id": "0439e247-6ece-47dd-8da8-ac0b5ba3e22f", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-591031306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3be3c16688fa4225a445a75e40e0f6a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4513bec6-f4", "ovs_interfaceid": "4513bec6-f476-4ac6-91cf-67ff8b19e2cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 693.899086] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:7f:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cc30a16-f070-421c-964e-50c9aa32f17a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4513bec6-f476-4ac6-91cf-67ff8b19e2cb', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 693.910051] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Creating folder: Project (3be3c16688fa4225a445a75e40e0f6a1). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.910051] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f219475-0f64-49b9-a457-69436a47ec2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.918628] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Created folder: Project (3be3c16688fa4225a445a75e40e0f6a1) in parent group-v587342. 
[ 693.919259] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Creating folder: Instances. Parent ref: group-v587411. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.919382] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4ffdf63-44ab-4ae6-ba25-1ac657b789b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.930573] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Created folder: Instances in parent group-v587411. [ 693.931228] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 693.931529] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 693.931816] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9fd3d301-5fb7-4b5b-b4d4-91f52e1a9712 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.950657] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.610s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.951209] env[69994]: DEBUG nova.compute.manager [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 693.954462] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.644s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.955550] env[69994]: DEBUG nova.objects.instance [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Lazy-loading 'resources' on Instance uuid 443382a8-64af-4f13-b7ab-11234fb13fcf {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 693.962910] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 693.962910] env[69994]: value = "task-2925148" [ 693.962910] env[69994]: _type = "Task" [ 693.962910] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.976895] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925148, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.994241] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925144, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.019342] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925145, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.161139] env[69994]: DEBUG nova.compute.manager [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 694.161543] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 694.162475] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361147da-0439-4879-998e-34611e495c62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.173190] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 694.173499] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9cc3ee6c-6f45-4168-bfa4-46c5319dea7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.248181] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 694.248455] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 694.248643] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleting the datastore file [datastore1] 
f3ae584d-18a5-4bbe-b4bf-860e2332b324 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 694.248967] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a708658e-81fd-4fa3-876a-da59e1f492a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.256014] env[69994]: DEBUG oslo_vmware.api [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 694.256014] env[69994]: value = "task-2925150" [ 694.256014] env[69994]: _type = "Task" [ 694.256014] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.269201] env[69994]: DEBUG oslo_vmware.api [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925150, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.466211] env[69994]: DEBUG nova.compute.utils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 694.470799] env[69994]: DEBUG nova.compute.manager [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 694.471132] env[69994]: DEBUG nova.network.neutron [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 694.500874] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925148, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.507189] env[69994]: DEBUG oslo_vmware.api [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925144, 'name': PowerOnVM_Task, 'duration_secs': 0.55246} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.507637] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 694.507923] env[69994]: INFO nova.compute.manager [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Took 8.73 seconds to spawn the instance on the hypervisor. 
[ 694.508218] env[69994]: DEBUG nova.compute.manager [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 694.512440] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132079d8-2777-4031-a1e9-49fa2190845d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.533201] env[69994]: DEBUG oslo_vmware.api [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925145, 'name': RemoveSnapshot_Task, 'duration_secs': 0.53193} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.536225] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 694.536493] env[69994]: INFO nova.compute.manager [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Took 17.80 seconds to snapshot the instance on the hypervisor. [ 694.566686] env[69994]: DEBUG nova.policy [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0cba98ba96246a0a9995caa28b03833', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a35626d7559c4a72a8f0e932b3d47de7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 694.683681] env[69994]: DEBUG nova.compute.manager [req-dc4a828c-5f65-458c-afba-6defc29285ae req-6fe65950-1af1-46b0-a313-5bebfe877576 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Received event network-changed-02be7506-27b5-4ccf-93a0-19b365247a08 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 694.683681] env[69994]: DEBUG nova.compute.manager [req-dc4a828c-5f65-458c-afba-6defc29285ae req-6fe65950-1af1-46b0-a313-5bebfe877576 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Refreshing instance network info cache due to event network-changed-02be7506-27b5-4ccf-93a0-19b365247a08. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 694.683681] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc4a828c-5f65-458c-afba-6defc29285ae req-6fe65950-1af1-46b0-a313-5bebfe877576 service nova] Acquiring lock "refresh_cache-70e5674d-4627-4720-9b87-955c2749e010" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.683681] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc4a828c-5f65-458c-afba-6defc29285ae req-6fe65950-1af1-46b0-a313-5bebfe877576 service nova] Acquired lock "refresh_cache-70e5674d-4627-4720-9b87-955c2749e010" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.683785] env[69994]: DEBUG nova.network.neutron [req-dc4a828c-5f65-458c-afba-6defc29285ae req-6fe65950-1af1-46b0-a313-5bebfe877576 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Refreshing network info cache for port 02be7506-27b5-4ccf-93a0-19b365247a08 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 694.775733] env[69994]: DEBUG oslo_vmware.api [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925150, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.46642} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.775733] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 694.775733] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 694.775733] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 694.775733] env[69994]: INFO nova.compute.manager [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Took 0.61 seconds to destroy the instance on the hypervisor. [ 694.776112] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 694.776112] env[69994]: DEBUG nova.compute.manager [-] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 694.776112] env[69994]: DEBUG nova.network.neutron [-] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 694.886284] env[69994]: DEBUG nova.compute.manager [req-b1bd5ea8-63a4-4b76-8836-a9348cc34061 req-ce8a6dbe-90fb-47ad-bcc7-ba544af3a459 service nova] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Received event network-changed-4513bec6-f476-4ac6-91cf-67ff8b19e2cb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 694.886485] env[69994]: DEBUG nova.compute.manager [req-b1bd5ea8-63a4-4b76-8836-a9348cc34061 req-ce8a6dbe-90fb-47ad-bcc7-ba544af3a459 service nova] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Refreshing instance network info cache due to event network-changed-4513bec6-f476-4ac6-91cf-67ff8b19e2cb. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 694.886695] env[69994]: DEBUG oslo_concurrency.lockutils [req-b1bd5ea8-63a4-4b76-8836-a9348cc34061 req-ce8a6dbe-90fb-47ad-bcc7-ba544af3a459 service nova] Acquiring lock "refresh_cache-e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.886834] env[69994]: DEBUG oslo_concurrency.lockutils [req-b1bd5ea8-63a4-4b76-8836-a9348cc34061 req-ce8a6dbe-90fb-47ad-bcc7-ba544af3a459 service nova] Acquired lock "refresh_cache-e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.886989] env[69994]: DEBUG nova.network.neutron [req-b1bd5ea8-63a4-4b76-8836-a9348cc34061 req-ce8a6dbe-90fb-47ad-bcc7-ba544af3a459 service nova] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Refreshing network info cache for port 4513bec6-f476-4ac6-91cf-67ff8b19e2cb {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 694.975189] env[69994]: DEBUG nova.compute.manager [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 694.991022] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925148, 'name': CreateVM_Task, 'duration_secs': 0.575779} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.991022] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 694.991022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.991022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.991022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 694.991022] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b3b6a41-d36d-4ebf-b227-3c6a83ab5ac2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.999057] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Waiting for the task: (returnval){ [ 694.999057] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528a590e-16eb-a887-43a9-c04ff27feef8" [ 694.999057] env[69994]: _type = "Task" [ 694.999057] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.007320] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528a590e-16eb-a887-43a9-c04ff27feef8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.018248] env[69994]: DEBUG nova.network.neutron [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Successfully created port: baab9a7d-282b-4491-baef-b768fcba09be {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 695.044451] env[69994]: DEBUG nova.compute.manager [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Instance disappeared during snapshot {{(pid=69994) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 695.053297] env[69994]: INFO nova.compute.manager [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Took 38.80 seconds to build instance. [ 695.066814] env[69994]: DEBUG nova.compute.manager [None req-a9880109-9370-4953-8762-9e5de5e9aff7 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Image not found during clean up 3b72b40e-7308-45f8-b5ff-72de11dda04c {{(pid=69994) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 695.080905] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c46f894-ffd3-4cc8-83b2-b37e6b8d6ac6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.088456] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f09db1-a7cd-47c3-9d08-7b6411ed1d34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.122300] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b4f162-d723-4239-bd49-cc14218dcf0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.129199] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88116029-5e48-4198-85bf-268e51df23ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.142721] env[69994]: DEBUG nova.compute.provider_tree [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.147461] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "00ab07b7-e7ed-4a71-b684-d5af8b1b7616" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.147687] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] 
Lock "00ab07b7-e7ed-4a71-b684-d5af8b1b7616" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.447943] env[69994]: DEBUG nova.network.neutron [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Successfully created port: 8ba05e4e-7501-48a5-9844-c7f0fc30072c {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 695.509372] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528a590e-16eb-a887-43a9-c04ff27feef8, 'name': SearchDatastore_Task, 'duration_secs': 0.044555} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.509681] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.509914] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 695.513760] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.513760] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.513760] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 695.513760] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5bc2e477-794b-4c5c-991e-e8390d6aaf1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.519556] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e95297ff-4608-43fd-8445-34594213e781 
tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 695.519752] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 695.525773] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08bdadaf-e865-4786-8a89-2b790aa020d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.535025] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Waiting for the task: (returnval){ [ 695.535025] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cde2c8-34fe-88b3-4bb6-533f9a86adc8" [ 695.535025] env[69994]: _type = "Task" [ 695.535025] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.544075] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cde2c8-34fe-88b3-4bb6-533f9a86adc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.556248] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f68baa80-5f4e-4b02-9de1-b7a02b818ca7 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Lock "55dd32b0-e67f-4943-86e8-b9956267fedc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.437s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.650230] env[69994]: DEBUG nova.scheduler.client.report [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 695.768774] env[69994]: DEBUG nova.network.neutron [req-b1bd5ea8-63a4-4b76-8836-a9348cc34061 req-ce8a6dbe-90fb-47ad-bcc7-ba544af3a459 service nova] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Updated VIF entry in instance network info cache for port 4513bec6-f476-4ac6-91cf-67ff8b19e2cb. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 695.768774] env[69994]: DEBUG nova.network.neutron [req-b1bd5ea8-63a4-4b76-8836-a9348cc34061 req-ce8a6dbe-90fb-47ad-bcc7-ba544af3a459 service nova] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Updating instance_info_cache with network_info: [{"id": "4513bec6-f476-4ac6-91cf-67ff8b19e2cb", "address": "fa:16:3e:88:7f:05", "network": {"id": "0439e247-6ece-47dd-8da8-ac0b5ba3e22f", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-591031306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3be3c16688fa4225a445a75e40e0f6a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4513bec6-f4", "ovs_interfaceid": "4513bec6-f476-4ac6-91cf-67ff8b19e2cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.828061] env[69994]: DEBUG nova.network.neutron [req-dc4a828c-5f65-458c-afba-6defc29285ae req-6fe65950-1af1-46b0-a313-5bebfe877576 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Updated VIF entry in instance network info cache for port 02be7506-27b5-4ccf-93a0-19b365247a08. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 695.828528] env[69994]: DEBUG nova.network.neutron [req-dc4a828c-5f65-458c-afba-6defc29285ae req-6fe65950-1af1-46b0-a313-5bebfe877576 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Updating instance_info_cache with network_info: [{"id": "02be7506-27b5-4ccf-93a0-19b365247a08", "address": "fa:16:3e:ca:79:3f", "network": {"id": "b2fb76d9-abd4-4e51-940d-d357d2ff0e9a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2007639793-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29874baa31194323bf3566aa52711e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02be7506-27", "ovs_interfaceid": "02be7506-27b5-4ccf-93a0-19b365247a08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.886239] env[69994]: DEBUG nova.network.neutron [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Successfully created port: 1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 695.986818] env[69994]: DEBUG nova.compute.manager [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 696.019719] env[69994]: DEBUG nova.virt.hardware [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 696.020227] env[69994]: DEBUG nova.virt.hardware [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 696.020521] env[69994]: DEBUG nova.virt.hardware [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 696.020853] env[69994]: DEBUG nova.virt.hardware [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 696.021161] env[69994]: DEBUG nova.virt.hardware [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 696.021462] env[69994]: DEBUG nova.virt.hardware [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 696.024059] env[69994]: DEBUG nova.virt.hardware [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 696.024059] env[69994]: DEBUG nova.virt.hardware [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 696.024059] env[69994]: DEBUG nova.virt.hardware [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 
tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 696.024059] env[69994]: DEBUG nova.virt.hardware [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 696.024059] env[69994]: DEBUG nova.virt.hardware [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 696.024277] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328531d9-aff6-4f70-9d2d-d035f697fbda {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.037017] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469f19f7-b31c-418c-848d-763f75d6576c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.059910] env[69994]: DEBUG nova.compute.manager [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 696.063689] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cde2c8-34fe-88b3-4bb6-533f9a86adc8, 'name': SearchDatastore_Task, 'duration_secs': 0.024207} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.064739] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fba8860b-7084-460d-a5dd-66eacf982857 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.070567] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Waiting for the task: (returnval){ [ 696.070567] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e88928-6f98-01b6-2f00-30ec9d17691e" [ 696.070567] env[69994]: _type = "Task" [ 696.070567] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.079558] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e88928-6f98-01b6-2f00-30ec9d17691e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.158112] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.204s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.160652] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.146s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.162171] env[69994]: INFO nova.compute.claims [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 696.185647] env[69994]: INFO nova.scheduler.client.report [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Deleted allocations for instance 443382a8-64af-4f13-b7ab-11234fb13fcf [ 696.274022] env[69994]: DEBUG oslo_concurrency.lockutils [req-b1bd5ea8-63a4-4b76-8836-a9348cc34061 req-ce8a6dbe-90fb-47ad-bcc7-ba544af3a459 service nova] Releasing lock "refresh_cache-e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.286894] env[69994]: DEBUG nova.network.neutron [-] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.338297] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc4a828c-5f65-458c-afba-6defc29285ae req-6fe65950-1af1-46b0-a313-5bebfe877576 service nova] Releasing lock "refresh_cache-70e5674d-4627-4720-9b87-955c2749e010" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.581940] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e88928-6f98-01b6-2f00-30ec9d17691e, 'name': SearchDatastore_Task, 'duration_secs': 0.02769} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.582282] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.582485] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4/e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 696.582745] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4aca69e-7264-4027-9c50-9d524c52778c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.586074] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.590596] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Waiting for the task: (returnval){ [ 696.590596] env[69994]: value = "task-2925152" [ 696.590596] env[69994]: _type = "Task" [ 696.590596] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.600201] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925152, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.693989] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b187f193-8ab2-42aa-8e5f-c80ddfa645dc tempest-ImagesNegativeTestJSON-2054161372 tempest-ImagesNegativeTestJSON-2054161372-project-member] Lock "443382a8-64af-4f13-b7ab-11234fb13fcf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.621s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.745748] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Acquiring lock "55dd32b0-e67f-4943-86e8-b9956267fedc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.746033] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Lock "55dd32b0-e67f-4943-86e8-b9956267fedc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.746249] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Acquiring lock "55dd32b0-e67f-4943-86e8-b9956267fedc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.746635] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Lock "55dd32b0-e67f-4943-86e8-b9956267fedc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.746874] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Lock "55dd32b0-e67f-4943-86e8-b9956267fedc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.749585] env[69994]: INFO nova.compute.manager [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Terminating instance [ 696.791847] env[69994]: INFO nova.compute.manager [-] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Took 2.02 seconds to deallocate network for instance. 
[ 697.108422] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925152, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501434} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.108422] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4/e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 697.108422] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 697.108422] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c6a407af-98da-41ba-8f84-c96420a03033 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.118183] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Waiting for the task: (returnval){ [ 697.118183] env[69994]: value = "task-2925154" [ 697.118183] env[69994]: _type = "Task" [ 697.118183] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.129113] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925154, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.188865] env[69994]: DEBUG nova.compute.manager [req-a49d13be-fb85-46f2-994b-c4565ebe0c99 req-ab47f13b-45c5-4e8d-abc4-7e63e9e54656 service nova] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Received event network-vif-deleted-3e918786-01b7-4a7e-a884-720a3c170676 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.257295] env[69994]: DEBUG nova.compute.manager [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 697.257707] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 697.261450] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a5d072-fa66-4b27-962b-1830d55989ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.270650] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 697.273239] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6dbdd17e-a4e5-4ccc-aa9e-ed26cb77b343 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.280535] env[69994]: DEBUG oslo_vmware.api [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Waiting for the task: (returnval){ [ 697.280535] env[69994]: value = "task-2925155" [ 697.280535] env[69994]: _type = "Task" [ 697.280535] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.292787] env[69994]: DEBUG oslo_vmware.api [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925155, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.299506] env[69994]: DEBUG oslo_concurrency.lockutils [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.615759] env[69994]: DEBUG nova.network.neutron [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Successfully updated port: baab9a7d-282b-4491-baef-b768fcba09be {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 697.631277] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925154, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068525} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.633718] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 697.634604] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163ccf8c-6a05-47e9-9a2b-59dfdaad2730 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.659926] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4/e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 697.664224] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acb9c34d-b6c8-4c7d-ae19-cb6882df241e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.684096] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Waiting for the task: (returnval){ [ 697.684096] env[69994]: value = "task-2925156" [ 697.684096] env[69994]: _type = "Task" [ 697.684096] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.696763] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925156, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.737412] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606cdf42-cb42-401e-96dd-d76a4b9939eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.745239] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc0683a-03ab-4350-beca-e74043702264 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.777056] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0220a663-1de6-40dd-b029-46f0b0616851 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.787950] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0484552-d452-4523-b0af-c285d0254a54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.795205] env[69994]: DEBUG oslo_vmware.api [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925155, 'name': PowerOffVM_Task, 'duration_secs': 0.281322} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.795764] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 697.795928] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 697.796192] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a2b29ba-66bf-44de-acb4-e0f901edbdc0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.805537] env[69994]: DEBUG nova.compute.provider_tree [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.869772] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 697.870018] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Deleting contents 
of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 697.870209] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Deleting the datastore file [datastore1] 55dd32b0-e67f-4943-86e8-b9956267fedc {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 697.870487] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5c42875-a130-4d4c-b89f-a90f3fc2eb3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.876649] env[69994]: DEBUG oslo_vmware.api [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Waiting for the task: (returnval){ [ 697.876649] env[69994]: value = "task-2925158" [ 697.876649] env[69994]: _type = "Task" [ 697.876649] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.885404] env[69994]: DEBUG oslo_vmware.api [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925158, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.888410] env[69994]: DEBUG nova.compute.manager [req-c26af155-ae9a-42c6-922b-d2f93098b3ce req-fc51df14-146e-4fd6-98b1-36e68efb0612 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Received event network-vif-plugged-baab9a7d-282b-4491-baef-b768fcba09be {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.888410] env[69994]: DEBUG oslo_concurrency.lockutils [req-c26af155-ae9a-42c6-922b-d2f93098b3ce req-fc51df14-146e-4fd6-98b1-36e68efb0612 service nova] Acquiring lock "c512ee01-7d45-49f0-b2ce-659392527264-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.888410] env[69994]: DEBUG oslo_concurrency.lockutils [req-c26af155-ae9a-42c6-922b-d2f93098b3ce req-fc51df14-146e-4fd6-98b1-36e68efb0612 service nova] Lock "c512ee01-7d45-49f0-b2ce-659392527264-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.888410] env[69994]: DEBUG oslo_concurrency.lockutils [req-c26af155-ae9a-42c6-922b-d2f93098b3ce req-fc51df14-146e-4fd6-98b1-36e68efb0612 service nova] Lock "c512ee01-7d45-49f0-b2ce-659392527264-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.888410] env[69994]: DEBUG nova.compute.manager [req-c26af155-ae9a-42c6-922b-d2f93098b3ce req-fc51df14-146e-4fd6-98b1-36e68efb0612 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] No waiting events found dispatching network-vif-plugged-baab9a7d-282b-4491-baef-b768fcba09be {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 697.888582] env[69994]: WARNING 
nova.compute.manager [req-c26af155-ae9a-42c6-922b-d2f93098b3ce req-fc51df14-146e-4fd6-98b1-36e68efb0612 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Received unexpected event network-vif-plugged-baab9a7d-282b-4491-baef-b768fcba09be for instance with vm_state building and task_state spawning. [ 697.925183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Acquiring lock "03a10403-0253-4df0-84b2-1e56f0c057fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.925443] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Lock "03a10403-0253-4df0-84b2-1e56f0c057fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.194829] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925156, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.312418] env[69994]: DEBUG nova.scheduler.client.report [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 698.388839] env[69994]: DEBUG oslo_vmware.api [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Task: {'id': task-2925158, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.490981} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.389164] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 698.389421] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 698.389559] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 698.389755] env[69994]: INFO nova.compute.manager [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Took 1.13 seconds to destroy the instance on the hypervisor. [ 698.390023] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 698.390684] env[69994]: DEBUG nova.compute.manager [-] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 698.390684] env[69994]: DEBUG nova.network.neutron [-] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 698.697507] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925156, 'name': ReconfigVM_Task, 'duration_secs': 0.742529} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.697998] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Reconfigured VM instance instance-00000015 to attach disk [datastore1] e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4/e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 698.698554] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-111c3969-381a-440a-b92d-937939d2bb09 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.705916] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Waiting for the task: (returnval){ [ 698.705916] env[69994]: value = "task-2925159" [ 698.705916] env[69994]: _type = "Task" [ 698.705916] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.714809] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925159, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.818762] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.819327] env[69994]: DEBUG nova.compute.manager [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 698.822597] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.646s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.822990] env[69994]: DEBUG nova.objects.instance [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 699.216020] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925159, 'name': Rename_Task, 'duration_secs': 0.19159} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.216313] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 699.216576] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df600ae4-395e-414b-b457-1095420574e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.222443] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Waiting for the task: (returnval){ [ 699.222443] env[69994]: value = "task-2925161" [ 699.222443] env[69994]: _type = "Task" [ 699.222443] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.230831] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925161, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.282924] env[69994]: DEBUG nova.network.neutron [-] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.324602] env[69994]: DEBUG nova.compute.utils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 699.326494] env[69994]: DEBUG nova.compute.manager [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 699.326757] env[69994]: DEBUG nova.network.neutron [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 699.407727] env[69994]: DEBUG nova.policy [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b097d35bead844a2a00c96adb23ef626', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '656c3c77f2b642a7846464642723e0cc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 699.541744] env[69994]: DEBUG nova.compute.manager [req-0c7e6f49-cbff-40be-87ef-1c56241ec384 req-bd994565-d5b8-48f4-ae37-32831d2acc31 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Received event network-changed-02be7506-27b5-4ccf-93a0-19b365247a08 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 699.541950] env[69994]: DEBUG nova.compute.manager [req-0c7e6f49-cbff-40be-87ef-1c56241ec384 req-bd994565-d5b8-48f4-ae37-32831d2acc31 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Refreshing instance network info cache due to event network-changed-02be7506-27b5-4ccf-93a0-19b365247a08. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 699.542780] env[69994]: DEBUG oslo_concurrency.lockutils [req-0c7e6f49-cbff-40be-87ef-1c56241ec384 req-bd994565-d5b8-48f4-ae37-32831d2acc31 service nova] Acquiring lock "refresh_cache-70e5674d-4627-4720-9b87-955c2749e010" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.542780] env[69994]: DEBUG oslo_concurrency.lockutils [req-0c7e6f49-cbff-40be-87ef-1c56241ec384 req-bd994565-d5b8-48f4-ae37-32831d2acc31 service nova] Acquired lock "refresh_cache-70e5674d-4627-4720-9b87-955c2749e010" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.542780] env[69994]: DEBUG nova.network.neutron [req-0c7e6f49-cbff-40be-87ef-1c56241ec384 req-bd994565-d5b8-48f4-ae37-32831d2acc31 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Refreshing network info cache for port 02be7506-27b5-4ccf-93a0-19b365247a08 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 699.735076] env[69994]: DEBUG oslo_vmware.api [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925161, 'name': PowerOnVM_Task, 'duration_secs': 0.434199} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.735613] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 699.735925] env[69994]: INFO nova.compute.manager [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Took 8.51 seconds to spawn the instance on the hypervisor. [ 699.736165] env[69994]: DEBUG nova.compute.manager [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 699.737488] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59f0ceb-b2b8-48e7-9fee-2ddcd9ff2f62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.785630] env[69994]: INFO nova.compute.manager [-] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Took 1.40 seconds to deallocate network for instance. 
[ 699.826083] env[69994]: DEBUG nova.network.neutron [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Successfully updated port: 8ba05e4e-7501-48a5-9844-c7f0fc30072c {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 699.836166] env[69994]: DEBUG nova.compute.manager [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 699.839778] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1681e761-8109-4403-9b27-c2f7bdd0f601 tempest-ServersAdmin275Test-1977141088 tempest-ServersAdmin275Test-1977141088-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.841241] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.247s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.842965] env[69994]: INFO nova.compute.claims [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 700.032478] env[69994]: DEBUG nova.compute.manager [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Received event network-changed-baab9a7d-282b-4491-baef-b768fcba09be {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 700.032478] env[69994]: DEBUG nova.compute.manager [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Refreshing instance network info cache due to event network-changed-baab9a7d-282b-4491-baef-b768fcba09be. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 700.033290] env[69994]: DEBUG oslo_concurrency.lockutils [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] Acquiring lock "refresh_cache-c512ee01-7d45-49f0-b2ce-659392527264" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.033290] env[69994]: DEBUG oslo_concurrency.lockutils [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] Acquired lock "refresh_cache-c512ee01-7d45-49f0-b2ce-659392527264" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 700.033865] env[69994]: DEBUG nova.network.neutron [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Refreshing network info cache for port baab9a7d-282b-4491-baef-b768fcba09be {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 700.155889] env[69994]: DEBUG nova.network.neutron [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Successfully created port: 7f9115c8-be0b-4607-b9e6-69371f8cef21 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 700.265500] env[69994]: INFO nova.compute.manager [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Took 39.11 seconds to build instance. [ 700.293553] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.328905] env[69994]: DEBUG nova.network.neutron [req-0c7e6f49-cbff-40be-87ef-1c56241ec384 req-bd994565-d5b8-48f4-ae37-32831d2acc31 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Updated VIF entry in instance network info cache for port 02be7506-27b5-4ccf-93a0-19b365247a08. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 700.329354] env[69994]: DEBUG nova.network.neutron [req-0c7e6f49-cbff-40be-87ef-1c56241ec384 req-bd994565-d5b8-48f4-ae37-32831d2acc31 service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Updating instance_info_cache with network_info: [{"id": "02be7506-27b5-4ccf-93a0-19b365247a08", "address": "fa:16:3e:ca:79:3f", "network": {"id": "b2fb76d9-abd4-4e51-940d-d357d2ff0e9a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2007639793-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29874baa31194323bf3566aa52711e4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02be7506-27", "ovs_interfaceid": "02be7506-27b5-4ccf-93a0-19b365247a08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.606703] env[69994]: DEBUG nova.network.neutron [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.746150] env[69994]: DEBUG nova.network.neutron [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.766793] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e95297ff-4608-43fd-8445-34594213e781 tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Lock "e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.270s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.836372] env[69994]: DEBUG oslo_concurrency.lockutils [req-0c7e6f49-cbff-40be-87ef-1c56241ec384 req-bd994565-d5b8-48f4-ae37-32831d2acc31 service nova] Releasing lock "refresh_cache-70e5674d-4627-4720-9b87-955c2749e010" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.836803] env[69994]: DEBUG nova.compute.manager [req-0c7e6f49-cbff-40be-87ef-1c56241ec384 req-bd994565-d5b8-48f4-ae37-32831d2acc31 service nova] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Received event network-vif-deleted-85de3239-5385-4cfe-ac05-cfb286970c9e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 700.847211] env[69994]: DEBUG nova.compute.manager [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 700.888741] env[69994]: DEBUG nova.virt.hardware [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 700.890020] env[69994]: DEBUG nova.virt.hardware [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 700.890020] env[69994]: DEBUG nova.virt.hardware [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 700.890020] env[69994]: DEBUG nova.virt.hardware [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 700.890020] env[69994]: DEBUG nova.virt.hardware [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 700.890020] env[69994]: DEBUG nova.virt.hardware [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 700.891758] env[69994]: DEBUG nova.virt.hardware [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 700.891758] env[69994]: DEBUG nova.virt.hardware [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 700.892583] 
env[69994]: DEBUG nova.virt.hardware [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 700.894249] env[69994]: DEBUG nova.virt.hardware [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 700.894249] env[69994]: DEBUG nova.virt.hardware [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 700.894249] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96b00a8-1166-452a-b548-2ba1de2f85ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.906448] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dde5e06-7aa2-4d71-a440-a6376f462263 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.957172] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Acquiring lock "70e5674d-4627-4720-9b87-955c2749e010" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.957439] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Lock "70e5674d-4627-4720-9b87-955c2749e010" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.957650] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Acquiring lock "70e5674d-4627-4720-9b87-955c2749e010-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.957846] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Lock "70e5674d-4627-4720-9b87-955c2749e010-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.957991] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Lock "70e5674d-4627-4720-9b87-955c2749e010-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.960081] env[69994]: INFO nova.compute.manager [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Terminating instance [ 701.250383] env[69994]: DEBUG oslo_concurrency.lockutils [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] Releasing lock "refresh_cache-c512ee01-7d45-49f0-b2ce-659392527264" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.250383] env[69994]: DEBUG nova.compute.manager [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Received event network-vif-plugged-8ba05e4e-7501-48a5-9844-c7f0fc30072c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 701.250383] env[69994]: DEBUG oslo_concurrency.lockutils [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] Acquiring lock "c512ee01-7d45-49f0-b2ce-659392527264-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.250383] env[69994]: DEBUG oslo_concurrency.lockutils [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] Lock "c512ee01-7d45-49f0-b2ce-659392527264-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.250566] env[69994]: DEBUG oslo_concurrency.lockutils [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] Lock "c512ee01-7d45-49f0-b2ce-659392527264-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.250642] env[69994]: DEBUG nova.compute.manager [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] No waiting events found dispatching network-vif-plugged-8ba05e4e-7501-48a5-9844-c7f0fc30072c {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 701.251420] env[69994]: WARNING nova.compute.manager [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Received unexpected event network-vif-plugged-8ba05e4e-7501-48a5-9844-c7f0fc30072c for instance with vm_state building and task_state spawning. 
[ 701.251420] env[69994]: DEBUG nova.compute.manager [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Received event network-changed-8ba05e4e-7501-48a5-9844-c7f0fc30072c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 701.251420] env[69994]: DEBUG nova.compute.manager [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Refreshing instance network info cache due to event network-changed-8ba05e4e-7501-48a5-9844-c7f0fc30072c. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 701.251420] env[69994]: DEBUG oslo_concurrency.lockutils [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] Acquiring lock "refresh_cache-c512ee01-7d45-49f0-b2ce-659392527264" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.251587] env[69994]: DEBUG oslo_concurrency.lockutils [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] Acquired lock "refresh_cache-c512ee01-7d45-49f0-b2ce-659392527264" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.251614] env[69994]: DEBUG nova.network.neutron [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Refreshing network info cache for port 8ba05e4e-7501-48a5-9844-c7f0fc30072c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 701.269461] env[69994]: DEBUG nova.compute.manager [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 701.427759] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62051887-bfa5-4eed-bc9e-2625cf764e8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.437081] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01567684-ec14-44ee-b42e-cbfcfe821df3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.471609] env[69994]: DEBUG nova.compute.manager [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 701.471844] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 701.472849] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39429c8-2086-4ddb-9a11-a495958d6c78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.476045] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a50b13-ac89-412d-8a8b-cfdb38d76b5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.486243] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03f95bc-e836-4f54-b8c9-9755fd4300ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.490703] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 701.490962] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eca06bec-e99c-4750-b92b-ff83aa22661c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.502858] env[69994]: DEBUG nova.compute.provider_tree [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.508655] env[69994]: DEBUG oslo_vmware.api [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Waiting for the task: (returnval){ [ 701.508655] env[69994]: value = "task-2925162" [ 701.508655] env[69994]: _type = "Task" [ 701.508655] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.518203] env[69994]: DEBUG oslo_vmware.api [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925162, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.812058] env[69994]: DEBUG nova.network.neutron [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 701.813154] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.967755] env[69994]: DEBUG nova.network.neutron [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.011063] env[69994]: DEBUG nova.scheduler.client.report [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 702.034242] env[69994]: DEBUG oslo_vmware.api [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925162, 'name': PowerOffVM_Task, 'duration_secs': 0.294005} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.034242] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 702.034242] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 702.034242] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fde6ac1f-f24e-4184-86b8-7564b96f0eda {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.094304] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 702.094304] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 702.094304] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Deleting the datastore file [datastore2] 70e5674d-4627-4720-9b87-955c2749e010 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 702.094304] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-888cb10a-d651-4f10-bd50-46ab99509bc6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.105732] env[69994]: DEBUG oslo_vmware.api [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Waiting for the task: (returnval){ [ 702.105732] env[69994]: value = "task-2925164" [ 702.105732] env[69994]: _type = "Task" [ 702.105732] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.117232] env[69994]: DEBUG oslo_vmware.api [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925164, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.318889] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Acquiring lock "e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.319701] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Lock "e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.320021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Acquiring lock "e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.320601] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Lock "e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.320966] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Lock "e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.327943] env[69994]: INFO nova.compute.manager [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Terminating instance [ 702.478015] env[69994]: DEBUG oslo_concurrency.lockutils [req-47c63224-1f60-45d3-b5d3-7819d9dcd02f req-1a711dc8-029c-46e1-974d-5769dfb28a85 service nova] Releasing lock "refresh_cache-c512ee01-7d45-49f0-b2ce-659392527264" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.527123] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.684s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.527123] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 
tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 702.528487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.735s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.533913] env[69994]: INFO nova.compute.claims [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 702.619768] env[69994]: DEBUG oslo_vmware.api [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Task: {'id': task-2925164, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142559} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.619858] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 702.623019] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 702.623019] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 702.623019] env[69994]: INFO nova.compute.manager [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Took 1.15 seconds to destroy the instance on the hypervisor. [ 702.623019] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 702.623019] env[69994]: DEBUG nova.compute.manager [-] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 702.623314] env[69994]: DEBUG nova.network.neutron [-] [instance: 70e5674d-4627-4720-9b87-955c2749e010] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 702.835660] env[69994]: DEBUG nova.compute.manager [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 702.835660] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 702.835660] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e198e6f-f09e-4559-8654-dddbdc48cb8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.846830] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 702.847147] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cadee8d0-28eb-47d2-8158-1fd210fbf8a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.854044] env[69994]: DEBUG oslo_vmware.api [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Waiting for the task: (returnval){ [ 702.854044] env[69994]: value = "task-2925165" [ 702.854044] env[69994]: _type = "Task" [ 702.854044] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.864426] env[69994]: DEBUG oslo_vmware.api [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925165, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.990034] env[69994]: DEBUG nova.network.neutron [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Successfully updated port: 7f9115c8-be0b-4607-b9e6-69371f8cef21 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 703.017162] env[69994]: DEBUG nova.compute.manager [req-303deff9-87d1-4c58-837b-613c888962fe req-da457592-ea98-419e-a68c-cf46a7d5a7a1 service nova] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Received event network-vif-plugged-7f9115c8-be0b-4607-b9e6-69371f8cef21 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 703.017162] env[69994]: DEBUG oslo_concurrency.lockutils [req-303deff9-87d1-4c58-837b-613c888962fe req-da457592-ea98-419e-a68c-cf46a7d5a7a1 service nova] Acquiring lock "e87e1839-9fef-462d-b1ab-842ef76828a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.017360] env[69994]: DEBUG oslo_concurrency.lockutils [req-303deff9-87d1-4c58-837b-613c888962fe req-da457592-ea98-419e-a68c-cf46a7d5a7a1 service nova] Lock "e87e1839-9fef-462d-b1ab-842ef76828a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.018024] env[69994]: DEBUG oslo_concurrency.lockutils [req-303deff9-87d1-4c58-837b-613c888962fe req-da457592-ea98-419e-a68c-cf46a7d5a7a1 service nova] Lock "e87e1839-9fef-462d-b1ab-842ef76828a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.018207] env[69994]: DEBUG nova.compute.manager [req-303deff9-87d1-4c58-837b-613c888962fe req-da457592-ea98-419e-a68c-cf46a7d5a7a1 service nova] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] No waiting events found dispatching network-vif-plugged-7f9115c8-be0b-4607-b9e6-69371f8cef21 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 703.018377] env[69994]: WARNING nova.compute.manager [req-303deff9-87d1-4c58-837b-613c888962fe req-da457592-ea98-419e-a68c-cf46a7d5a7a1 service nova] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Received unexpected event network-vif-plugged-7f9115c8-be0b-4607-b9e6-69371f8cef21 for instance with vm_state building and task_state spawning. [ 703.040978] env[69994]: DEBUG nova.compute.utils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 703.050722] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 703.050976] env[69994]: DEBUG nova.network.neutron [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 703.080791] env[69994]: DEBUG nova.network.neutron [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Successfully updated port: 1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 703.097689] env[69994]: DEBUG nova.policy [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1be5d892102470990945b2dc1678832', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5b24b45b67e4a7aade59619ba342f82', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 703.255178] env[69994]: DEBUG nova.compute.manager [req-bf71cdd4-20cd-4827-b377-2fadad81f706 req-38d341c3-8548-4706-81c9-d30f4f85560e service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Received event network-vif-deleted-02be7506-27b5-4ccf-93a0-19b365247a08 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 703.255419] env[69994]: INFO nova.compute.manager [req-bf71cdd4-20cd-4827-b377-2fadad81f706 req-38d341c3-8548-4706-81c9-d30f4f85560e service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Neutron deleted interface 02be7506-27b5-4ccf-93a0-19b365247a08; detaching it from the instance and deleting it from the info cache [ 703.255630] env[69994]: DEBUG nova.network.neutron [req-bf71cdd4-20cd-4827-b377-2fadad81f706 req-38d341c3-8548-4706-81c9-d30f4f85560e service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.310492] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Acquiring lock "f1f0d79f-dc67-4cf9-816c-c451f20d65ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.310693] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Lock "f1f0d79f-dc67-4cf9-816c-c451f20d65ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
703.370557] env[69994]: DEBUG oslo_vmware.api [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925165, 'name': PowerOffVM_Task, 'duration_secs': 0.22398} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.371124] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 703.371124] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 703.371446] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c1a349e-aed5-481a-99c3-e7fd3618c5f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.430409] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 703.430738] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 703.431206] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Deleting the datastore file [datastore1] e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 703.431684] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8535026a-bbdd-46e6-a31d-38ceb4850d9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.439261] env[69994]: DEBUG oslo_vmware.api [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Waiting for the task: (returnval){ [ 703.439261] env[69994]: value = "task-2925167" [ 703.439261] env[69994]: _type = "Task" [ 703.439261] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.451131] env[69994]: DEBUG oslo_vmware.api [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925167, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.499170] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Acquiring lock "refresh_cache-e87e1839-9fef-462d-b1ab-842ef76828a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.499170] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Acquired lock "refresh_cache-e87e1839-9fef-462d-b1ab-842ef76828a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.499170] env[69994]: DEBUG nova.network.neutron [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 703.500388] env[69994]: DEBUG nova.network.neutron [-] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.551448] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 703.589491] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "refresh_cache-c512ee01-7d45-49f0-b2ce-659392527264" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.589567] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquired lock "refresh_cache-c512ee01-7d45-49f0-b2ce-659392527264" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.589675] env[69994]: DEBUG nova.network.neutron [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 703.761088] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f04e7d7f-9ade-4c19-bc3c-964d835f2b8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.770963] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d88d12e-d1d8-4eb5-a649-532a251ff77b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.803186] env[69994]: DEBUG nova.compute.manager [req-bf71cdd4-20cd-4827-b377-2fadad81f706 req-38d341c3-8548-4706-81c9-d30f4f85560e service nova] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Detach interface failed, port_id=02be7506-27b5-4ccf-93a0-19b365247a08, reason: Instance 70e5674d-4627-4720-9b87-955c2749e010 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 703.913201] env[69994]: DEBUG nova.network.neutron [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Successfully created port: e7eb12ca-e58a-4b85-acd5-5b8d14209edc {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 703.948866] env[69994]: DEBUG oslo_vmware.api [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Task: {'id': task-2925167, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154318} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.950433] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 703.950433] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 703.950433] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 703.950433] env[69994]: INFO nova.compute.manager [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Took 1.12 seconds to destroy the instance on the hypervisor. [ 703.950433] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 703.950786] env[69994]: DEBUG nova.compute.manager [-] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 703.950786] env[69994]: DEBUG nova.network.neutron [-] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 704.005016] env[69994]: INFO nova.compute.manager [-] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Took 1.38 seconds to deallocate network for instance. [ 704.068768] env[69994]: DEBUG nova.network.neutron [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.072715] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57fac079-2e11-4983-b7a8-d1f7646fe66b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.085274] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546e80b3-7660-43ee-aab6-b147554f0521 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.132400] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554acfdc-ba16-4fd5-9c15-4f31684fa9cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.142193] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11058cf1-465a-4f9c-a1b9-fdfc380a0960 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.157596] env[69994]: DEBUG nova.compute.provider_tree [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.196017] env[69994]: DEBUG nova.network.neutron [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.429317] env[69994]: DEBUG nova.network.neutron [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Updating instance_info_cache with network_info: [{"id": "7f9115c8-be0b-4607-b9e6-69371f8cef21", "address": "fa:16:3e:6e:e3:ba", "network": {"id": "fb1d30c9-89e9-41cf-9813-02d6fc75b13a", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-882563135-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "656c3c77f2b642a7846464642723e0cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f204ad5-8a45-4372-80ba-010fe0f9a337", "external-id": "nsx-vlan-transportzone-593", "segmentation_id": 593, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f9115c8-be", "ovs_interfaceid": "7f9115c8-be0b-4607-b9e6-69371f8cef21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.516832] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.572884] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 704.604520] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 704.605197] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.605395] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 704.605618] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.605799] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 704.605970] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 704.606611] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 704.606611] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 704.606760] env[69994]: DEBUG nova.virt.hardware [None 
req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 704.606996] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 704.607234] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 704.608833] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af709ce3-ae02-401f-9643-efe548552528 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.618336] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9cd4b7-ae1c-430e-b3b5-32c7658739d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.661318] env[69994]: DEBUG nova.scheduler.client.report [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 704.693019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Acquiring lock "9a1343a8-11b4-4c9e-8445-931eab036a4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.693505] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Lock "9a1343a8-11b4-4c9e-8445-931eab036a4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.802182] env[69994]: DEBUG nova.network.neutron [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Updating instance_info_cache with network_info: [{"id": 
"baab9a7d-282b-4491-baef-b768fcba09be", "address": "fa:16:3e:76:32:3f", "network": {"id": "ed4b6a9e-2943-4332-b135-498405bc2cf0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2027309304", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.230", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaab9a7d-28", "ovs_interfaceid": "baab9a7d-282b-4491-baef-b768fcba09be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8ba05e4e-7501-48a5-9844-c7f0fc30072c", "address": "fa:16:3e:97:da:bf", "network": {"id": "f3ea56f2-4504-432f-8c1d-ed3f45470639", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1891970213", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba05e4e-75", "ovs_interfaceid": "8ba05e4e-7501-48a5-9844-c7f0fc30072c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7", "address": "fa:16:3e:52:aa:68", "network": {"id": "ed4b6a9e-2943-4332-b135-498405bc2cf0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2027309304", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.228", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ccd0f30-a9", "ovs_interfaceid": "1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 704.931559] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Releasing lock "refresh_cache-e87e1839-9fef-462d-b1ab-842ef76828a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.931899] env[69994]: DEBUG nova.compute.manager [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Instance network_info: |[{"id": "7f9115c8-be0b-4607-b9e6-69371f8cef21", "address": "fa:16:3e:6e:e3:ba", "network": {"id": "fb1d30c9-89e9-41cf-9813-02d6fc75b13a", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-882563135-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "656c3c77f2b642a7846464642723e0cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f204ad5-8a45-4372-80ba-010fe0f9a337", "external-id": "nsx-vlan-transportzone-593", "segmentation_id": 593, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f9115c8-be", "ovs_interfaceid": "7f9115c8-be0b-4607-b9e6-69371f8cef21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 704.932404] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:e3:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2f204ad5-8a45-4372-80ba-010fe0f9a337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f9115c8-be0b-4607-b9e6-69371f8cef21', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 704.940241] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Creating folder: Project (656c3c77f2b642a7846464642723e0cc). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 704.940484] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a92d7c79-d91e-440f-9ed3-402ec9b47fac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.954218] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Created folder: Project (656c3c77f2b642a7846464642723e0cc) in parent group-v587342. [ 704.954218] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Creating folder: Instances. Parent ref: group-v587414. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 704.954631] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c32af43a-38b4-41eb-962e-b47bbcf56a88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.659483] env[69994]: DEBUG nova.network.neutron [-] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.661178] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.133s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.661624] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Start building networks asynchronously for instance. 
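The "Acquiring lock" / "acquired by ... waited" / '"released" by ... held' DEBUG lines throughout this trace come from oslo.concurrency's lockutils. A minimal usage sketch of the two common entry points follows; only the lock names mirror the log, the functions themselves are placeholders.

```python
# Minimal sketch of the oslo.concurrency locking that produces the
# "Acquiring lock" / "waited" / "held" lines above. Function bodies are
# placeholders; only the lock names are taken from the log.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim():
    # Serialized against every other caller using the 'compute_resources' name.
    pass

def refresh_network_cache(instance_uuid):
    # Context-manager form, analogous to the "refresh_cache-<uuid>" locks.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass
```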
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 705.664269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Releasing lock "refresh_cache-c512ee01-7d45-49f0-b2ce-659392527264" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.664537] env[69994]: DEBUG nova.compute.manager [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Instance network_info: |[{"id": "baab9a7d-282b-4491-baef-b768fcba09be", "address": "fa:16:3e:76:32:3f", "network": {"id": "ed4b6a9e-2943-4332-b135-498405bc2cf0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2027309304", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.230", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaab9a7d-28", "ovs_interfaceid": "baab9a7d-282b-4491-baef-b768fcba09be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8ba05e4e-7501-48a5-9844-c7f0fc30072c", "address": "fa:16:3e:97:da:bf", "network": {"id": "f3ea56f2-4504-432f-8c1d-ed3f45470639", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1891970213", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba05e4e-75", "ovs_interfaceid": "8ba05e4e-7501-48a5-9844-c7f0fc30072c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7", "address": "fa:16:3e:52:aa:68", "network": {"id": "ed4b6a9e-2943-4332-b135-498405bc2cf0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2027309304", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.228", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ccd0f30-a9", "ovs_interfaceid": "1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 705.666361] env[69994]: DEBUG nova.compute.manager [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Received event network-vif-plugged-1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 705.666560] env[69994]: DEBUG oslo_concurrency.lockutils [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] Acquiring lock "c512ee01-7d45-49f0-b2ce-659392527264-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.666758] env[69994]: DEBUG oslo_concurrency.lockutils [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] Lock "c512ee01-7d45-49f0-b2ce-659392527264-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.666918] env[69994]: DEBUG oslo_concurrency.lockutils [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] Lock "c512ee01-7d45-49f0-b2ce-659392527264-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.667094] env[69994]: DEBUG nova.compute.manager [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] No waiting events found dispatching network-vif-plugged-1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 705.667265] env[69994]: WARNING nova.compute.manager [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Received unexpected event network-vif-plugged-1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7 for instance with vm_state building and task_state spawning. 
[ 705.667421] env[69994]: DEBUG nova.compute.manager [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Received event network-changed-7f9115c8-be0b-4607-b9e6-69371f8cef21 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 705.667569] env[69994]: DEBUG nova.compute.manager [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Refreshing instance network info cache due to event network-changed-7f9115c8-be0b-4607-b9e6-69371f8cef21. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 705.667750] env[69994]: DEBUG oslo_concurrency.lockutils [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] Acquiring lock "refresh_cache-e87e1839-9fef-462d-b1ab-842ef76828a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.667943] env[69994]: DEBUG oslo_concurrency.lockutils [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] Acquired lock "refresh_cache-e87e1839-9fef-462d-b1ab-842ef76828a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.668032] env[69994]: DEBUG nova.network.neutron [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Refreshing network info cache for port 7f9115c8-be0b-4607-b9e6-69371f8cef21 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 705.669501] env[69994]: DEBUG oslo_concurrency.lockutils [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.438s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.669770] env[69994]: DEBUG nova.objects.instance [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Lazy-loading 'resources' on Instance uuid 316ab41e-d3c1-4cef-8d63-a138e21d0ea3 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 705.670878] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:32:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f65996a3-f865-4492-9377-cd14ec8b3aae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'baab9a7d-282b-4491-baef-b768fcba09be', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:da:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ba05e4e-7501-48a5-9844-c7f0fc30072c', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:aa:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f65996a3-f865-4492-9377-cd14ec8b3aae', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 705.681758] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Creating folder: Project (a35626d7559c4a72a8f0e932b3d47de7). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 705.686239] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28dfb42d-c173-454a-8da8-6895ba196847 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.691513] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Created folder: Instances in parent group-v587414. [ 705.691737] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 705.691918] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 705.692132] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f71bb55-c2fb-49ad-af19-d002c464ba62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.708800] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Created folder: Project (a35626d7559c4a72a8f0e932b3d47de7) in parent group-v587342. [ 705.709009] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Creating folder: Instances. Parent ref: group-v587416. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 705.710059] env[69994]: DEBUG nova.compute.manager [req-0cfae474-f260-4905-8391-1c37b1ed92f0 req-a6b6925c-0585-434e-a0f3-2d4c4a43ca97 service nova] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Received event network-vif-plugged-e7eb12ca-e58a-4b85-acd5-5b8d14209edc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 705.710373] env[69994]: DEBUG oslo_concurrency.lockutils [req-0cfae474-f260-4905-8391-1c37b1ed92f0 req-a6b6925c-0585-434e-a0f3-2d4c4a43ca97 service nova] Acquiring lock "db9f7abd-ab45-49a3-9035-695b26756142-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.710467] env[69994]: DEBUG oslo_concurrency.lockutils [req-0cfae474-f260-4905-8391-1c37b1ed92f0 req-a6b6925c-0585-434e-a0f3-2d4c4a43ca97 service nova] Lock "db9f7abd-ab45-49a3-9035-695b26756142-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.710610] env[69994]: DEBUG oslo_concurrency.lockutils [req-0cfae474-f260-4905-8391-1c37b1ed92f0 req-a6b6925c-0585-434e-a0f3-2d4c4a43ca97 service nova] Lock "db9f7abd-ab45-49a3-9035-695b26756142-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.710776] env[69994]: DEBUG nova.compute.manager [req-0cfae474-f260-4905-8391-1c37b1ed92f0 req-a6b6925c-0585-434e-a0f3-2d4c4a43ca97 service nova] [instance: db9f7abd-ab45-49a3-9035-695b26756142] No waiting events found dispatching network-vif-plugged-e7eb12ca-e58a-4b85-acd5-5b8d14209edc {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 705.710977] env[69994]: WARNING nova.compute.manager [req-0cfae474-f260-4905-8391-1c37b1ed92f0 req-a6b6925c-0585-434e-a0f3-2d4c4a43ca97 service nova] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Received unexpected event network-vif-plugged-e7eb12ca-e58a-4b85-acd5-5b8d14209edc for instance with vm_state building and task_state spawning. [ 705.711687] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f674dd5a-581a-41e8-9063-4c38e0f38167 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.715724] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 705.715724] env[69994]: value = "task-2925171" [ 705.715724] env[69994]: _type = "Task" [ 705.715724] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.722117] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Created folder: Instances in parent group-v587416. [ 705.722334] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 705.725248] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 705.725644] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925171, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.725886] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d524f42f-b82e-4ce3-8557-2ab2c1888909 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.746946] env[69994]: DEBUG nova.network.neutron [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Successfully updated port: e7eb12ca-e58a-4b85-acd5-5b8d14209edc {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 705.752604] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 705.752604] env[69994]: value = "task-2925173" [ 705.752604] env[69994]: _type = "Task" [ 705.752604] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.760765] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925173, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.997126] env[69994]: DEBUG nova.network.neutron [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Updated VIF entry in instance network info cache for port 7f9115c8-be0b-4607-b9e6-69371f8cef21. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 705.997126] env[69994]: DEBUG nova.network.neutron [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Updating instance_info_cache with network_info: [{"id": "7f9115c8-be0b-4607-b9e6-69371f8cef21", "address": "fa:16:3e:6e:e3:ba", "network": {"id": "fb1d30c9-89e9-41cf-9813-02d6fc75b13a", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-882563135-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "656c3c77f2b642a7846464642723e0cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f204ad5-8a45-4372-80ba-010fe0f9a337", "external-id": "nsx-vlan-transportzone-593", "segmentation_id": 593, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f9115c8-be", "ovs_interfaceid": "7f9115c8-be0b-4607-b9e6-69371f8cef21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.084368] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.084704] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.170147] env[69994]: INFO nova.compute.manager [-] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Took 2.22 seconds to deallocate network for instance. [ 706.171391] env[69994]: DEBUG nova.compute.utils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 706.174605] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 706.174763] env[69994]: DEBUG nova.network.neutron [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 706.225792] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925171, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.227254] env[69994]: DEBUG nova.policy [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1be5d892102470990945b2dc1678832', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5b24b45b67e4a7aade59619ba342f82', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 706.250299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "refresh_cache-db9f7abd-ab45-49a3-9035-695b26756142" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.251041] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired lock "refresh_cache-db9f7abd-ab45-49a3-9035-695b26756142" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.251041] env[69994]: DEBUG nova.network.neutron [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 706.264537] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925173, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.499102] env[69994]: DEBUG oslo_concurrency.lockutils [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] Releasing lock "refresh_cache-e87e1839-9fef-462d-b1ab-842ef76828a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.499568] env[69994]: DEBUG nova.compute.manager [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Received event network-changed-1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 706.499833] env[69994]: DEBUG nova.compute.manager [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Refreshing instance network info cache due to event network-changed-1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 706.500148] env[69994]: DEBUG oslo_concurrency.lockutils [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] Acquiring lock "refresh_cache-c512ee01-7d45-49f0-b2ce-659392527264" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.500389] env[69994]: DEBUG oslo_concurrency.lockutils [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] Acquired lock "refresh_cache-c512ee01-7d45-49f0-b2ce-659392527264" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.500571] env[69994]: DEBUG nova.network.neutron [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Refreshing network info cache for port 1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 706.563158] env[69994]: DEBUG nova.network.neutron [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Successfully created port: 34344b41-5493-4a10-b542-c94483d0abfb {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 706.597210] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.597210] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.597210] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.597210] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.597210] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.597210] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.597210] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] CONF.reclaim_instance_interval <= 0, skipping... 
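The run of "Running periodic task ComputeManager._poll_…" lines and the "CONF.reclaim_instance_interval <= 0, skipping..." message come from oslo.service's periodic task machinery. The declaration-only sketch below shows how such tasks are typically wired up; the spacing values and the early-return guard are illustrative stand-ins, not Nova's configuration.

```python
# Declaration-only sketch of oslo.service periodic tasks, the machinery behind
# the "Running periodic task ComputeManager._*" lines above. Spacing values
# are illustrative; PeriodicTasks itself is constructed with a ConfigOpts
# object by the service that runs the tasks.
from oslo_service import periodic_task

class ComputeManagerSketch(periodic_task.PeriodicTasks):
    @periodic_task.periodic_task(spacing=60)
    def _poll_unconfirmed_resizes(self, context):
        pass

    @periodic_task.periodic_task(spacing=300)
    def _reclaim_queued_deletes(self, context):
        reclaim_interval = 0   # stand-in for CONF.reclaim_instance_interval
        if reclaim_interval <= 0:
            # Same guard as the "skipping..." DEBUG line above.
            return
```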
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 706.597210] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.680846] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 706.684979] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.736654] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925171, 'name': CreateVM_Task, 'duration_secs': 0.697072} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.738063] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 706.738794] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.738950] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.739296] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 706.739803] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27e86d9a-6e5b-44e9-96ab-132312e0e1bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.746717] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Waiting for the task: (returnval){ [ 706.746717] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b789ca-1d88-efc1-961e-50eeb957d717" [ 706.746717] env[69994]: _type = "Task" [ 
706.746717] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.747197] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b704b7-43b7-4c89-9e4f-6b255c19ff67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.765776] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b789ca-1d88-efc1-961e-50eeb957d717, 'name': SearchDatastore_Task, 'duration_secs': 0.011967} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.767074] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b35d93-8a76-4d20-ad13-a74bf365d47c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.770082] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.770320] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 706.770542] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.770676] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.770846] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 706.771948] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00e076d3-46cb-4850-8b52-5777348b624b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.777191] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': 
task-2925173, 'name': CreateVM_Task, 'duration_secs': 0.789402} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.777641] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 706.778481] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.778626] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.778913] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 706.779636] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f874f98-a458-4a00-b367-c1f070551786 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.810184] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a99a2d-5945-4299-a798-e90131e39e4a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.813133] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 706.813298] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 706.815074] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1b5fe3e-2f35-4031-a9c9-e24d72fd806d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.817557] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 706.817557] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522193e0-e311-8bff-fbbb-cd6f2e7b31ba" [ 706.817557] env[69994]: _type = "Task" [ 706.817557] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.824317] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Waiting for the task: (returnval){ [ 706.824317] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526896e5-f166-b416-5eac-33ecce2ed384" [ 706.824317] env[69994]: _type = "Task" [ 706.824317] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.824965] env[69994]: DEBUG nova.network.neutron [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.828950] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c55377-0f77-4d6e-808c-90b119354f89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.839365] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522193e0-e311-8bff-fbbb-cd6f2e7b31ba, 'name': SearchDatastore_Task, 'duration_secs': 0.010616} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.840072] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.840285] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 706.840694] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.849082] env[69994]: DEBUG nova.compute.provider_tree [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.853694] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526896e5-f166-b416-5eac-33ecce2ed384, 'name': SearchDatastore_Task, 'duration_secs': 0.011455} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.854831] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a8bd6c7-2aa3-45a2-88ac-33bac269f481 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.860284] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Waiting for the task: (returnval){ [ 706.860284] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b9946c-dca9-26fb-f473-75ef55c55564" [ 706.860284] env[69994]: _type = "Task" [ 706.860284] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.869733] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b9946c-dca9-26fb-f473-75ef55c55564, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.079037] env[69994]: DEBUG nova.network.neutron [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Updating instance_info_cache with network_info: [{"id": "e7eb12ca-e58a-4b85-acd5-5b8d14209edc", "address": "fa:16:3e:f2:70:35", "network": {"id": "d11115f9-d7da-4b5d-869d-3f5980311128", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1428850886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b24b45b67e4a7aade59619ba342f82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7eb12ca-e5", "ovs_interfaceid": "e7eb12ca-e58a-4b85-acd5-5b8d14209edc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.100148] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.358606] env[69994]: DEBUG nova.scheduler.client.report [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 707.378381] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b9946c-dca9-26fb-f473-75ef55c55564, 'name': SearchDatastore_Task, 'duration_secs': 0.009949} completed successfully. 
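The inventory dict reported for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be encodes how much of each resource class Placement will hand out: usable capacity is (total - reserved) * allocation_ratio, while max_unit caps any single allocation. A small worked example with the logged numbers:

```python
# Worked example using the inventory data logged above. Placement's usable
# capacity per resource class is (total - reserved) * allocation_ratio;
# max_unit limits what a single instance may consume.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 158,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print("%s: capacity=%g, max per single allocation=%s" % (rc, capacity, inv['max_unit']))
# VCPU: capacity=192, max per single allocation=16
# MEMORY_MB: capacity=196078, max per single allocation=65530
# DISK_GB: capacity=400, max per single allocation=158
```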
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.378692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.380977] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e87e1839-9fef-462d-b1ab-842ef76828a4/e87e1839-9fef-462d-b1ab-842ef76828a4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 707.380977] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.380977] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 707.381153] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ffda2dd3-e289-4ab3-aabc-29e38a6bcc4c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.384413] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fdd99df4-929b-4c55-86f9-02c38648c3ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.394482] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Waiting for the task: (returnval){ [ 707.394482] env[69994]: value = "task-2925174" [ 707.394482] env[69994]: _type = "Task" [ 707.394482] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.397733] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 707.399947] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Folder [datastore1] devstack-image-cache_base created. 
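The CopyVirtualDisk_Task above copies the cached image VMDK from the shared devstack-image-cache_base folder into the instance's own directory on the same datastore. The sketch below only reconstructs the two datastore paths involved, as a simplification of what the driver's datastore utilities do; the function names are hypothetical.

```python
# Path construction for the image-cache copy logged above: the cached VMDK
# lives under devstack-image-cache_base/<image-id>/ and is copied into the
# instance's folder on the same datastore. Helper names are hypothetical.
def cached_image_path(datastore, image_id):
    return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)

def instance_disk_path(datastore, instance_uuid):
    return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

src = cached_image_path("datastore1", "f75f967d-5bd8-4c15-9a52-96f7e9dd9d48")
dst = instance_disk_path("datastore1", "e87e1839-9fef-462d-b1ab-842ef76828a4")
# src and dst match the source and destination of the CopyVirtualDisk_Task above.
```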
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 707.401452] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-604a91ad-1e02-4342-b58a-5d3ad11994be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.406889] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925174, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.408861] env[69994]: DEBUG nova.network.neutron [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Updated VIF entry in instance network info cache for port 1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 707.409489] env[69994]: DEBUG nova.network.neutron [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Updating instance_info_cache with network_info: [{"id": "baab9a7d-282b-4491-baef-b768fcba09be", "address": "fa:16:3e:76:32:3f", "network": {"id": "ed4b6a9e-2943-4332-b135-498405bc2cf0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2027309304", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.230", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaab9a7d-28", "ovs_interfaceid": "baab9a7d-282b-4491-baef-b768fcba09be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8ba05e4e-7501-48a5-9844-c7f0fc30072c", "address": "fa:16:3e:97:da:bf", "network": {"id": "f3ea56f2-4504-432f-8c1d-ed3f45470639", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1891970213", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba05e4e-75", "ovs_interfaceid": "8ba05e4e-7501-48a5-9844-c7f0fc30072c", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7", "address": "fa:16:3e:52:aa:68", "network": {"id": "ed4b6a9e-2943-4332-b135-498405bc2cf0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2027309304", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.228", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ccd0f30-a9", "ovs_interfaceid": "1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.412874] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 707.412874] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c573fa-b086-4812-f670-ab6522811091" [ 707.412874] env[69994]: _type = "Task" [ 707.412874] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.422208] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c573fa-b086-4812-f670-ab6522811091, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.585327] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Releasing lock "refresh_cache-db9f7abd-ab45-49a3-9035-695b26756142" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.585327] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Instance network_info: |[{"id": "e7eb12ca-e58a-4b85-acd5-5b8d14209edc", "address": "fa:16:3e:f2:70:35", "network": {"id": "d11115f9-d7da-4b5d-869d-3f5980311128", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1428850886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b24b45b67e4a7aade59619ba342f82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7eb12ca-e5", "ovs_interfaceid": "e7eb12ca-e58a-4b85-acd5-5b8d14209edc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 707.585327] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:70:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ba3bd22-c936-470e-89bd-b3a5587e87a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e7eb12ca-e58a-4b85-acd5-5b8d14209edc', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 707.592609] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Creating folder: Project (b5b24b45b67e4a7aade59619ba342f82). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 707.593812] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-771274c7-09dd-4f91-adb8-e25b616c5768 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.606274] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Created folder: Project (b5b24b45b67e4a7aade59619ba342f82) in parent group-v587342. [ 707.606955] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Creating folder: Instances. Parent ref: group-v587420. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 707.607471] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a749f19-c1b0-4dd5-8e3f-27051d0fa5a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.622109] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Created folder: Instances in parent group-v587420. [ 707.622109] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 707.622109] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 707.622109] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1126bf33-8b69-4814-806b-b360e3632951 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.650936] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 707.650936] env[69994]: value = "task-2925177" [ 707.650936] env[69994]: _type = "Task" [ 707.650936] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.662698] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925177, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.688684] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 707.712984] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 707.713557] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 707.713557] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 707.714302] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 707.714850] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 707.715073] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 707.715521] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 707.715769] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 707.715983] env[69994]: DEBUG nova.virt.hardware [None 
req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 707.716242] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 707.716953] env[69994]: DEBUG nova.virt.hardware [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 707.718501] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b06853-37ba-473b-a62e-72bcafc01c78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.731994] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9658e625-695d-40fb-8480-ead46c6fd498 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.739165] env[69994]: DEBUG nova.compute.manager [req-1cb5d4e4-2718-4335-8f23-a719bb378819 req-6212bb73-c18d-471d-b0d6-5fefca229634 service nova] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Received event network-changed-e7eb12ca-e58a-4b85-acd5-5b8d14209edc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 707.739165] env[69994]: DEBUG nova.compute.manager [req-1cb5d4e4-2718-4335-8f23-a719bb378819 req-6212bb73-c18d-471d-b0d6-5fefca229634 service nova] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Refreshing instance network info cache due to event network-changed-e7eb12ca-e58a-4b85-acd5-5b8d14209edc. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 707.739165] env[69994]: DEBUG oslo_concurrency.lockutils [req-1cb5d4e4-2718-4335-8f23-a719bb378819 req-6212bb73-c18d-471d-b0d6-5fefca229634 service nova] Acquiring lock "refresh_cache-db9f7abd-ab45-49a3-9035-695b26756142" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.739165] env[69994]: DEBUG oslo_concurrency.lockutils [req-1cb5d4e4-2718-4335-8f23-a719bb378819 req-6212bb73-c18d-471d-b0d6-5fefca229634 service nova] Acquired lock "refresh_cache-db9f7abd-ab45-49a3-9035-695b26756142" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.739165] env[69994]: DEBUG nova.network.neutron [req-1cb5d4e4-2718-4335-8f23-a719bb378819 req-6212bb73-c18d-471d-b0d6-5fefca229634 service nova] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Refreshing network info cache for port e7eb12ca-e58a-4b85-acd5-5b8d14209edc {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 707.873597] env[69994]: DEBUG oslo_concurrency.lockutils [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.204s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 707.880068] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.278s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.880068] env[69994]: INFO nova.compute.claims [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.898890] env[69994]: INFO nova.scheduler.client.report [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Deleted allocations for instance 316ab41e-d3c1-4cef-8d63-a138e21d0ea3 [ 707.907613] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925174, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45881} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.908103] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e87e1839-9fef-462d-b1ab-842ef76828a4/e87e1839-9fef-462d-b1ab-842ef76828a4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 707.908326] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 707.908577] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73e76be5-3259-4683-a067-545e363a5961 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.916821] env[69994]: DEBUG oslo_concurrency.lockutils [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] Releasing lock "refresh_cache-c512ee01-7d45-49f0-b2ce-659392527264" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.917082] env[69994]: DEBUG nova.compute.manager [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Received event network-vif-deleted-4513bec6-f476-4ac6-91cf-67ff8b19e2cb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 707.917295] env[69994]: INFO nova.compute.manager [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Neutron deleted interface 4513bec6-f476-4ac6-91cf-67ff8b19e2cb; detaching it from the instance and deleting it from the info cache [ 707.917442] env[69994]: DEBUG nova.network.neutron [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.921509] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Waiting for the task: (returnval){ [ 707.921509] env[69994]: value = "task-2925180" [ 707.921509] env[69994]: _type = "Task" [ 707.921509] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.936561] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925180, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.941797] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c573fa-b086-4812-f670-ab6522811091, 'name': SearchDatastore_Task, 'duration_secs': 0.016542} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.943052] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dddb1ca4-9e6d-4e34-a6ce-dd45b034ded4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.952138] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 707.952138] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526af67e-3ffa-f330-fea9-1eedaee5e9d9" [ 707.952138] env[69994]: _type = "Task" [ 707.952138] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.966922] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526af67e-3ffa-f330-fea9-1eedaee5e9d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.163387] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925177, 'name': CreateVM_Task, 'duration_secs': 0.457889} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.163692] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 708.164298] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.164460] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.164770] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 708.165072] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1560607e-736f-4883-8365-491d9b126dc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.172538] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 708.172538] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528d2a0d-9bc0-d3d0-b5c2-9cbce896009f" [ 708.172538] env[69994]: _type = "Task" [ 708.172538] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.181230] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528d2a0d-9bc0-d3d0-b5c2-9cbce896009f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.377845] env[69994]: DEBUG nova.network.neutron [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Successfully updated port: 34344b41-5493-4a10-b542-c94483d0abfb {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 708.410970] env[69994]: DEBUG oslo_concurrency.lockutils [None req-45f45c5b-ab46-4299-9357-cd6dee0f55ad tempest-ServersAdmin275Test-1953154045 tempest-ServersAdmin275Test-1953154045-project-member] Lock "316ab41e-d3c1-4cef-8d63-a138e21d0ea3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.585s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.430577] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c2c1543-6730-422c-958f-6da308e3772b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.440570] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925180, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077066} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.441959] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 708.442820] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44794ca-0955-497e-b9a4-1697a816b58a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.448137] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1966e2c2-a393-4d80-9d85-209da51c780b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.494777] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] e87e1839-9fef-462d-b1ab-842ef76828a4/e87e1839-9fef-462d-b1ab-842ef76828a4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 708.506396] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c076f51d-5cc7-4c3e-b32d-dcd6138d543f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.521474] env[69994]: DEBUG nova.compute.manager [req-a4f91853-994d-4e5a-94e2-473ebbab379e req-1bf8f999-4882-40cf-a990-4a92afb35ffe service nova] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Detach interface failed, 
port_id=4513bec6-f476-4ac6-91cf-67ff8b19e2cb, reason: Instance e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 708.525884] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526af67e-3ffa-f330-fea9-1eedaee5e9d9, 'name': SearchDatastore_Task, 'duration_secs': 0.013969} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.527208] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.527208] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] c512ee01-7d45-49f0-b2ce-659392527264/c512ee01-7d45-49f0-b2ce-659392527264.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 708.527208] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2194fa1-e658-4ca6-b40e-61c6f559e010 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.533261] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Waiting for the task: (returnval){ [ 708.533261] env[69994]: value = "task-2925182" [ 708.533261] env[69994]: _type = "Task" [ 708.533261] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.534868] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 708.534868] env[69994]: value = "task-2925183" [ 708.534868] env[69994]: _type = "Task" [ 708.534868] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.552644] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925183, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.556330] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925182, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.582063] env[69994]: DEBUG nova.network.neutron [req-1cb5d4e4-2718-4335-8f23-a719bb378819 req-6212bb73-c18d-471d-b0d6-5fefca229634 service nova] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Updated VIF entry in instance network info cache for port e7eb12ca-e58a-4b85-acd5-5b8d14209edc. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 708.582063] env[69994]: DEBUG nova.network.neutron [req-1cb5d4e4-2718-4335-8f23-a719bb378819 req-6212bb73-c18d-471d-b0d6-5fefca229634 service nova] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Updating instance_info_cache with network_info: [{"id": "e7eb12ca-e58a-4b85-acd5-5b8d14209edc", "address": "fa:16:3e:f2:70:35", "network": {"id": "d11115f9-d7da-4b5d-869d-3f5980311128", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1428850886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b24b45b67e4a7aade59619ba342f82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7eb12ca-e5", "ovs_interfaceid": "e7eb12ca-e58a-4b85-acd5-5b8d14209edc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.685032] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528d2a0d-9bc0-d3d0-b5c2-9cbce896009f, 'name': SearchDatastore_Task, 'duration_secs': 0.010564} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.685032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.685032] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 708.685366] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.685518] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.685713] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.685986] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68c90405-e92d-4cda-9af5-b88c87102e8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.702361] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.702547] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 708.703544] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f136f427-b315-441b-98a7-ee660248a18f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.711593] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 708.711593] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520d9ec1-0ff5-a971-94fc-a60ae5175a8a" [ 708.711593] env[69994]: _type = "Task" [ 708.711593] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.723426] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520d9ec1-0ff5-a971-94fc-a60ae5175a8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.882743] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "refresh_cache-367665db-def4-4148-a316-b83378e00ba8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.882743] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired lock "refresh_cache-367665db-def4-4148-a316-b83378e00ba8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.882743] env[69994]: DEBUG nova.network.neutron [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 709.044600] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925182, 'name': ReconfigVM_Task, 'duration_secs': 0.5023} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.050354] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Reconfigured VM instance instance-00000017 to attach disk [datastore1] e87e1839-9fef-462d-b1ab-842ef76828a4/e87e1839-9fef-462d-b1ab-842ef76828a4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 709.051205] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c275ab50-126b-4511-a7ad-82bc3dcf529f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.058640] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925183, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.060067] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Waiting for the task: (returnval){ [ 709.060067] env[69994]: value = "task-2925184" [ 709.060067] env[69994]: _type = "Task" [ 709.060067] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.073071] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925184, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.085658] env[69994]: DEBUG oslo_concurrency.lockutils [req-1cb5d4e4-2718-4335-8f23-a719bb378819 req-6212bb73-c18d-471d-b0d6-5fefca229634 service nova] Releasing lock "refresh_cache-db9f7abd-ab45-49a3-9035-695b26756142" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.224246] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520d9ec1-0ff5-a971-94fc-a60ae5175a8a, 'name': SearchDatastore_Task, 'duration_secs': 0.062968} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.225065] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8af4ea0e-f619-4833-b01e-1abfbd5acec3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.233110] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 709.233110] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ea00d8-e83d-40ce-aabe-8d33f496f432" [ 709.233110] env[69994]: _type = "Task" [ 709.233110] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.241048] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ea00d8-e83d-40ce-aabe-8d33f496f432, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.390388] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d430c665-e2d6-42ca-b55a-f25e6a494583 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.400296] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14fb72d8-7767-4dd6-a212-20793807a004 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.433984] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc933ffc-f530-44a7-9f7a-4afc3b812bc5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.437461] env[69994]: DEBUG nova.network.neutron [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.447731] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23db474a-ca93-44f3-8fee-71449c4e4853 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.460926] env[69994]: DEBUG nova.compute.provider_tree [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.555485] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925183, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52878} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.555780] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] c512ee01-7d45-49f0-b2ce-659392527264/c512ee01-7d45-49f0-b2ce-659392527264.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 709.556129] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 709.556364] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66c3606e-6624-42f2-9722-2bc706814d10 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.566076] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 709.566076] env[69994]: value = "task-2925185" [ 709.566076] env[69994]: _type = "Task" [ 709.566076] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.573211] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925184, 'name': Rename_Task, 'duration_secs': 0.353437} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.576536] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 709.576852] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c23cf7a1-da28-40ad-ab70-5beb6a816e7c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.583601] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925185, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.590739] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Waiting for the task: (returnval){ [ 709.590739] env[69994]: value = "task-2925186" [ 709.590739] env[69994]: _type = "Task" [ 709.590739] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.604915] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925186, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.647175] env[69994]: DEBUG nova.network.neutron [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Updating instance_info_cache with network_info: [{"id": "34344b41-5493-4a10-b542-c94483d0abfb", "address": "fa:16:3e:9c:1d:96", "network": {"id": "d11115f9-d7da-4b5d-869d-3f5980311128", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1428850886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b24b45b67e4a7aade59619ba342f82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34344b41-54", "ovs_interfaceid": "34344b41-5493-4a10-b542-c94483d0abfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.743800] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ea00d8-e83d-40ce-aabe-8d33f496f432, 'name': SearchDatastore_Task, 'duration_secs': 0.010407} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.744196] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.744522] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] db9f7abd-ab45-49a3-9035-695b26756142/db9f7abd-ab45-49a3-9035-695b26756142.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 709.745132] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49c160aa-c1ed-4e4c-a7a1-6fca5c2bf8ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.753409] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 709.753409] env[69994]: value = "task-2925187" [ 709.753409] env[69994]: _type = "Task" [ 709.753409] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.761701] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925187, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.859034] env[69994]: DEBUG nova.compute.manager [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] [instance: 367665db-def4-4148-a316-b83378e00ba8] Received event network-vif-plugged-34344b41-5493-4a10-b542-c94483d0abfb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 709.859323] env[69994]: DEBUG oslo_concurrency.lockutils [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] Acquiring lock "367665db-def4-4148-a316-b83378e00ba8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.859565] env[69994]: DEBUG oslo_concurrency.lockutils [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] Lock "367665db-def4-4148-a316-b83378e00ba8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.859750] env[69994]: DEBUG oslo_concurrency.lockutils [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] Lock "367665db-def4-4148-a316-b83378e00ba8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.859936] env[69994]: DEBUG nova.compute.manager [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] [instance: 367665db-def4-4148-a316-b83378e00ba8] No waiting events found dispatching network-vif-plugged-34344b41-5493-4a10-b542-c94483d0abfb {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 709.860135] env[69994]: WARNING nova.compute.manager [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] [instance: 367665db-def4-4148-a316-b83378e00ba8] Received unexpected event network-vif-plugged-34344b41-5493-4a10-b542-c94483d0abfb for instance with vm_state building and task_state spawning. [ 709.860437] env[69994]: DEBUG nova.compute.manager [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] [instance: 367665db-def4-4148-a316-b83378e00ba8] Received event network-changed-34344b41-5493-4a10-b542-c94483d0abfb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 709.860620] env[69994]: DEBUG nova.compute.manager [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] [instance: 367665db-def4-4148-a316-b83378e00ba8] Refreshing instance network info cache due to event network-changed-34344b41-5493-4a10-b542-c94483d0abfb. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 709.860818] env[69994]: DEBUG oslo_concurrency.lockutils [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] Acquiring lock "refresh_cache-367665db-def4-4148-a316-b83378e00ba8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.965896] env[69994]: DEBUG nova.scheduler.client.report [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 710.080898] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925185, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07354} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.081496] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.082899] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76883abe-7fec-4a0d-998e-59f4dffe3119 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.119610] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] c512ee01-7d45-49f0-b2ce-659392527264/c512ee01-7d45-49f0-b2ce-659392527264.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.122278] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a7cdb7f-556f-4ed2-b7cd-afe28918c716 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.143970] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925186, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.149545] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Releasing lock "refresh_cache-367665db-def4-4148-a316-b83378e00ba8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.149897] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Instance network_info: |[{"id": "34344b41-5493-4a10-b542-c94483d0abfb", "address": "fa:16:3e:9c:1d:96", "network": {"id": "d11115f9-d7da-4b5d-869d-3f5980311128", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1428850886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b24b45b67e4a7aade59619ba342f82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34344b41-54", "ovs_interfaceid": "34344b41-5493-4a10-b542-c94483d0abfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 710.150298] env[69994]: DEBUG oslo_concurrency.lockutils [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] Acquired lock "refresh_cache-367665db-def4-4148-a316-b83378e00ba8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.150608] env[69994]: DEBUG nova.network.neutron [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] [instance: 367665db-def4-4148-a316-b83378e00ba8] Refreshing network info cache for port 34344b41-5493-4a10-b542-c94483d0abfb {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 710.152278] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:1d:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ba3bd22-c936-470e-89bd-b3a5587e87a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34344b41-5493-4a10-b542-c94483d0abfb', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 710.165192] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 
tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 710.167709] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 367665db-def4-4148-a316-b83378e00ba8] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 710.169259] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 710.169259] env[69994]: value = "task-2925188" [ 710.169259] env[69994]: _type = "Task" [ 710.169259] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.169259] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e647f401-1f62-4dbf-87d6-ec1f332d4307 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.201117] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925188, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.202813] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 710.202813] env[69994]: value = "task-2925189" [ 710.202813] env[69994]: _type = "Task" [ 710.202813] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.215474] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925189, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.265498] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925187, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45946} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.266101] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] db9f7abd-ab45-49a3-9035-695b26756142/db9f7abd-ab45-49a3-9035-695b26756142.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 710.266101] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 710.267841] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c29da6d2-649c-41e9-b91e-9a9cef38c668 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.275422] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 710.275422] env[69994]: value = "task-2925190" [ 710.275422] env[69994]: _type = "Task" [ 710.275422] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.285995] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925190, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.471685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.593s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.472432] env[69994]: DEBUG nova.compute.manager [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 710.476495] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.300s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.476495] env[69994]: INFO nova.compute.claims [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 710.605855] env[69994]: DEBUG oslo_vmware.api [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925186, 'name': PowerOnVM_Task, 'duration_secs': 0.5603} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.606254] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 710.606881] env[69994]: INFO nova.compute.manager [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Took 9.76 seconds to spawn the instance on the hypervisor. [ 710.606881] env[69994]: DEBUG nova.compute.manager [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 710.607836] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6b7f0a-f02d-4c9d-9ac2-ea4dcee83e71 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.701920] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925188, 'name': ReconfigVM_Task, 'duration_secs': 0.339382} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.702310] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Reconfigured VM instance instance-00000016 to attach disk [datastore1] c512ee01-7d45-49f0-b2ce-659392527264/c512ee01-7d45-49f0-b2ce-659392527264.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 710.703039] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0bd574ea-5e4e-48ab-9f07-2bb8ca426158 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.717553] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925189, 'name': CreateVM_Task, 'duration_secs': 0.36592} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.718897] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 367665db-def4-4148-a316-b83378e00ba8] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 710.719296] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 710.719296] env[69994]: value = "task-2925191" [ 710.719296] env[69994]: _type = "Task" [ 710.719296] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.720148] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.720360] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.721270] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 710.723798] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-265baf31-06d3-4953-87c0-f6c4c5a9dddd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.737424] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 710.737424] 
env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52586d0b-ef26-905a-2d8f-f38dbf8f512a" [ 710.737424] env[69994]: _type = "Task" [ 710.737424] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.741231] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925191, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.753631] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52586d0b-ef26-905a-2d8f-f38dbf8f512a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.787601] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925190, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09076} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.787894] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.788783] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30487577-b023-4dbe-b7b2-3cb6fdb0f8e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.812626] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] db9f7abd-ab45-49a3-9035-695b26756142/db9f7abd-ab45-49a3-9035-695b26756142.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.812985] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83388a52-354d-4997-9cd8-5831d338f865 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.840888] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 710.840888] env[69994]: value = "task-2925192" [ 710.840888] env[69994]: _type = "Task" [ 710.840888] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.858125] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925192, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.982258] env[69994]: DEBUG nova.compute.utils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 710.985847] env[69994]: DEBUG nova.compute.manager [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Not allocating networking since 'none' was specified. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 711.028760] env[69994]: DEBUG nova.network.neutron [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] [instance: 367665db-def4-4148-a316-b83378e00ba8] Updated VIF entry in instance network info cache for port 34344b41-5493-4a10-b542-c94483d0abfb. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 711.029195] env[69994]: DEBUG nova.network.neutron [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] [instance: 367665db-def4-4148-a316-b83378e00ba8] Updating instance_info_cache with network_info: [{"id": "34344b41-5493-4a10-b542-c94483d0abfb", "address": "fa:16:3e:9c:1d:96", "network": {"id": "d11115f9-d7da-4b5d-869d-3f5980311128", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1428850886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b24b45b67e4a7aade59619ba342f82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34344b41-54", "ovs_interfaceid": "34344b41-5493-4a10-b542-c94483d0abfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.158597] env[69994]: INFO nova.compute.manager [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Took 44.17 seconds to build instance. 
[ 711.233471] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925191, 'name': Rename_Task, 'duration_secs': 0.245967} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.233869] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 711.234187] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74ac5bbe-c460-47d6-89b9-2af4905d96ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.242187] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 711.242187] env[69994]: value = "task-2925194" [ 711.242187] env[69994]: _type = "Task" [ 711.242187] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.258286] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52586d0b-ef26-905a-2d8f-f38dbf8f512a, 'name': SearchDatastore_Task, 'duration_secs': 0.019544} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.262156] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.262156] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 711.262156] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.262343] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.262421] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 711.262829] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925194, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.263146] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-783697e6-076a-4b26-a299-12eabb9ab490 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.277210] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 711.277556] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 711.278205] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-615e347d-ea5a-447e-be7f-a94bf324b4c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.284662] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 711.284662] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ca02cc-9a59-2476-c428-e266f3d4fa97" [ 711.284662] env[69994]: _type = "Task" [ 711.284662] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.294949] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ca02cc-9a59-2476-c428-e266f3d4fa97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.353146] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925192, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.486794] env[69994]: DEBUG nova.compute.manager [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 711.532587] env[69994]: DEBUG oslo_concurrency.lockutils [req-a68f14e9-f219-4e88-9682-a4fe65781080 req-f216a552-618d-43f1-8e54-4ba6250381a4 service nova] Releasing lock "refresh_cache-367665db-def4-4148-a316-b83378e00ba8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.663183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31296436-1e92-4fae-a392-3c358ebbfc68 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lock "e87e1839-9fef-462d-b1ab-842ef76828a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.610s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.762801] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925194, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.798938] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ca02cc-9a59-2476-c428-e266f3d4fa97, 'name': SearchDatastore_Task, 'duration_secs': 0.016888} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.799821] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa0ce593-3e58-455d-9eac-30e3fd0a2bdc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.812934] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 711.812934] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e764ab-2fda-20eb-fdd4-7bcba8fce593" [ 711.812934] env[69994]: _type = "Task" [ 711.812934] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.834258] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e764ab-2fda-20eb-fdd4-7bcba8fce593, 'name': SearchDatastore_Task, 'duration_secs': 0.018819} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.834823] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.835125] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 367665db-def4-4148-a316-b83378e00ba8/367665db-def4-4148-a316-b83378e00ba8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 711.835414] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c760428-ec3b-47fc-98b2-9cede005af1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.850060] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 711.850060] env[69994]: value = "task-2925195" [ 711.850060] env[69994]: _type = "Task" [ 711.850060] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.858997] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925192, 'name': ReconfigVM_Task, 'duration_secs': 0.528217} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.860065] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Reconfigured VM instance instance-00000018 to attach disk [datastore1] db9f7abd-ab45-49a3-9035-695b26756142/db9f7abd-ab45-49a3-9035-695b26756142.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 711.861221] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c534a7d7-0a1d-4f0d-bc89-1305c58205e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.867706] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925195, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.877463] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 711.877463] env[69994]: value = "task-2925196" [ 711.877463] env[69994]: _type = "Task" [ 711.877463] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.892315] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925196, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.951083] env[69994]: DEBUG oslo_concurrency.lockutils [None req-52dff8d6-77f9-4ad9-83e2-fc8d0b1c4321 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Acquiring lock "interface-e87e1839-9fef-462d-b1ab-842ef76828a4-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.951401] env[69994]: DEBUG oslo_concurrency.lockutils [None req-52dff8d6-77f9-4ad9-83e2-fc8d0b1c4321 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lock "interface-e87e1839-9fef-462d-b1ab-842ef76828a4-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.951762] env[69994]: DEBUG nova.objects.instance [None req-52dff8d6-77f9-4ad9-83e2-fc8d0b1c4321 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lazy-loading 'flavor' on Instance uuid e87e1839-9fef-462d-b1ab-842ef76828a4 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 712.051055] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9193cae9-e493-48ff-9a01-b1628d9307ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.059556] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8bcbe6-5da4-42f6-ac2b-71bdfd992570 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.090918] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721c86f6-1bbf-4c7d-97f0-eefeab790053 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.099577] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be07b620-2af6-440c-9432-2c8192866974 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.114389] env[69994]: DEBUG nova.compute.provider_tree [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.165712] env[69994]: DEBUG nova.compute.manager [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 712.265603] env[69994]: DEBUG oslo_vmware.api [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925194, 'name': PowerOnVM_Task, 'duration_secs': 0.617095} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.265603] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 712.265745] env[69994]: INFO nova.compute.manager [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Took 16.28 seconds to spawn the instance on the hypervisor. [ 712.266251] env[69994]: DEBUG nova.compute.manager [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 712.267234] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9a5b41-da0d-4ab3-b3bc-5a970fa3335b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.363801] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925195, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.392289] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925196, 'name': Rename_Task, 'duration_secs': 0.192883} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.392289] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 712.392289] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b0a472e-fe4e-415d-98e4-bac91f49146d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.400195] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 712.400195] env[69994]: value = "task-2925197" [ 712.400195] env[69994]: _type = "Task" [ 712.400195] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.411889] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925197, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.456534] env[69994]: DEBUG nova.objects.instance [None req-52dff8d6-77f9-4ad9-83e2-fc8d0b1c4321 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lazy-loading 'pci_requests' on Instance uuid e87e1839-9fef-462d-b1ab-842ef76828a4 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 712.499683] env[69994]: DEBUG nova.compute.manager [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 712.529090] env[69994]: DEBUG nova.virt.hardware [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 712.529383] env[69994]: DEBUG nova.virt.hardware [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 712.529569] env[69994]: DEBUG nova.virt.hardware [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 712.529756] env[69994]: DEBUG nova.virt.hardware [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 712.529904] env[69994]: DEBUG nova.virt.hardware [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 712.530062] env[69994]: DEBUG nova.virt.hardware [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 712.530421] env[69994]: DEBUG 
nova.virt.hardware [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 712.530421] env[69994]: DEBUG nova.virt.hardware [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 712.530584] env[69994]: DEBUG nova.virt.hardware [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 712.530744] env[69994]: DEBUG nova.virt.hardware [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 712.530914] env[69994]: DEBUG nova.virt.hardware [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 712.531780] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff12103-91ee-49f1-ad73-c7fd8a8e56fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.540705] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611affc3-6a27-4672-8dfb-0ebff8c349ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.558135] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 712.564367] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Creating folder: Project (846775b1b5b94052b8d66685f1efec55). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 712.564870] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe9f1fd4-7e5f-428c-938a-63beaf350de2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.577035] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Created folder: Project (846775b1b5b94052b8d66685f1efec55) in parent group-v587342. 
[ 712.577336] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Creating folder: Instances. Parent ref: group-v587427. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 712.577653] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3b4a747-4088-42ed-b230-a34fe1c67270 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.588129] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Created folder: Instances in parent group-v587427. [ 712.588481] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 712.588737] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 712.589025] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-006dc37f-9c36-4d7d-9701-2899318033c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.607884] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 712.607884] env[69994]: value = "task-2925200" [ 712.607884] env[69994]: _type = "Task" [ 712.607884] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.618555] env[69994]: DEBUG nova.scheduler.client.report [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 712.636475] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925200, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.694317] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.787277] env[69994]: INFO nova.compute.manager [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Took 49.19 seconds to build instance. [ 712.863808] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925195, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.708394} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.863808] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 367665db-def4-4148-a316-b83378e00ba8/367665db-def4-4148-a316-b83378e00ba8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 712.863808] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 712.863808] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e4de4cd-7347-4ead-b94b-f7bac975f5d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.872829] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 712.872829] env[69994]: value = "task-2925201" [ 712.872829] env[69994]: _type = "Task" [ 712.872829] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.885235] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925201, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.915479] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925197, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.959358] env[69994]: DEBUG nova.objects.base [None req-52dff8d6-77f9-4ad9-83e2-fc8d0b1c4321 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 712.959532] env[69994]: DEBUG nova.network.neutron [None req-52dff8d6-77f9-4ad9-83e2-fc8d0b1c4321 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 713.048261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-52dff8d6-77f9-4ad9-83e2-fc8d0b1c4321 tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lock "interface-e87e1839-9fef-462d-b1ab-842ef76828a4-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.097s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.121596] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925200, 'name': CreateVM_Task, 'duration_secs': 0.364143} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.121596] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 713.121949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.122130] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.122487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 713.123273] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.123756] env[69994]: DEBUG nova.compute.manager [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 
tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 713.126960] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f33ae417-0cce-41fd-8d18-da82c8091fdd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.128733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 30.670s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.137564] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 713.137564] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52be6b03-95ca-36c6-6161-4cada2e8c05c" [ 713.137564] env[69994]: _type = "Task" [ 713.137564] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.150065] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52be6b03-95ca-36c6-6161-4cada2e8c05c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.290704] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ef01978-1f8b-4bff-b3a2-a922c6715452 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "c512ee01-7d45-49f0-b2ce-659392527264" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.789s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.384240] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925201, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092958} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.384577] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 713.385429] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038fac82-7ccd-4cba-a784-e924c72496cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.410448] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 367665db-def4-4148-a316-b83378e00ba8/367665db-def4-4148-a316-b83378e00ba8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 713.414733] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a638eb72-a934-400c-a867-97a37b02cb2c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.436508] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925197, 'name': PowerOnVM_Task, 'duration_secs': 0.785564} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.437938] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 713.438334] env[69994]: INFO nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Took 8.87 seconds to spawn the instance on the hypervisor. [ 713.438334] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 713.438664] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 713.438664] env[69994]: value = "task-2925203" [ 713.438664] env[69994]: _type = "Task" [ 713.438664] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.439391] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ab6c8b-e2cd-45ac-90e8-f2b9f7f07c4d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.455198] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925203, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.630107] env[69994]: DEBUG nova.compute.utils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 713.631551] env[69994]: DEBUG nova.compute.manager [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 713.631725] env[69994]: DEBUG nova.network.neutron [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 713.655647] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52be6b03-95ca-36c6-6161-4cada2e8c05c, 'name': SearchDatastore_Task, 'duration_secs': 0.015211} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.656085] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.656277] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 713.656516] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.656704] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.656945] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 713.657515] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bfd1056e-4ed6-4539-ae96-ff6f4b97eae1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.674153] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 713.674374] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 713.675744] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e5be9fa-7a10-4576-82f0-d4330a16876d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.681931] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 713.681931] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52181a3d-f1d9-5b9b-8186-80e740b1380e" [ 713.681931] env[69994]: _type = "Task" [ 713.681931] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.694051] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52181a3d-f1d9-5b9b-8186-80e740b1380e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.729776] env[69994]: DEBUG nova.policy [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5667c67d9b6f44138d1479e901b60c74', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34469ad51e694a3389595c28ef508144', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 713.795224] env[69994]: DEBUG nova.compute.manager [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 713.951196] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925203, 'name': ReconfigVM_Task, 'duration_secs': 0.283104} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.951494] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 367665db-def4-4148-a316-b83378e00ba8/367665db-def4-4148-a316-b83378e00ba8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 713.952130] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f121ab73-2b37-4a89-a80c-31e3ff6cd285 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.962921] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 713.962921] env[69994]: value = "task-2925204" [ 713.962921] env[69994]: _type = "Task" [ 713.962921] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.971701] env[69994]: INFO nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Took 43.40 seconds to build instance. [ 713.980466] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925204, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.140245] env[69994]: DEBUG nova.compute.manager [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 714.153781] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4e127b-fb99-4381-a8f0-48bd082dfc64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.164027] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d816590d-3655-4fb7-a5ba-6de8506f6444 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.198122] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3d4163-5321-40c3-8db7-aee6e4a5ffcd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.210554] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574b2744-d16f-43b0-a8f7-c16f0c13c455 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.215284] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52181a3d-f1d9-5b9b-8186-80e740b1380e, 'name': SearchDatastore_Task, 'duration_secs': 0.025198} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.216270] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7123ecbc-4ab8-48a4-b483-df981d488665 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.227126] env[69994]: DEBUG nova.compute.provider_tree [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.229629] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 714.229629] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5242d821-79bf-d19f-4b36-9f129fbb8fa1" [ 714.229629] env[69994]: _type = "Task" [ 714.229629] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.238359] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5242d821-79bf-d19f-4b36-9f129fbb8fa1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.276667] env[69994]: DEBUG nova.network.neutron [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Successfully created port: 7672d351-d6eb-466c-87d1-f7f798da34d4 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 714.318845] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.474422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "db9f7abd-ab45-49a3-9035-695b26756142" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.251s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.480182] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925204, 'name': Rename_Task, 'duration_secs': 0.143956} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.480954] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 714.481234] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57b74160-ca44-4ad7-b043-98015dd7ffe0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.490790] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 714.490790] env[69994]: value = "task-2925205" [ 714.490790] env[69994]: _type = "Task" [ 714.490790] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.501774] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925205, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.734572] env[69994]: DEBUG nova.scheduler.client.report [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 714.752505] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5242d821-79bf-d19f-4b36-9f129fbb8fa1, 'name': SearchDatastore_Task, 'duration_secs': 0.051785} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.753874] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.754325] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] f36c29d1-b945-4afe-abbd-431e59de7cec/f36c29d1-b945-4afe-abbd-431e59de7cec.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 714.755053] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29328d71-ff84-4a13-94c7-44b207236a29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.767810] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 714.767810] env[69994]: value = "task-2925206" [ 714.767810] env[69994]: _type = "Task" [ 714.767810] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.783367] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925206, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.981768] env[69994]: DEBUG nova.compute.manager [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 715.006384] env[69994]: DEBUG oslo_vmware.api [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925205, 'name': PowerOnVM_Task, 'duration_secs': 0.479915} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.006887] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 715.006949] env[69994]: INFO nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Took 7.32 seconds to spawn the instance on the hypervisor. [ 715.008035] env[69994]: DEBUG nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 715.009028] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8872e41f-7d13-4a2c-8042-821b2774936e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.149562] env[69994]: DEBUG nova.compute.manager [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 715.180699] env[69994]: DEBUG nova.virt.hardware [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 715.181408] env[69994]: DEBUG nova.virt.hardware [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 715.181976] env[69994]: DEBUG nova.virt.hardware [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 715.182573] env[69994]: DEBUG nova.virt.hardware [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 715.182694] env[69994]: DEBUG nova.virt.hardware [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 715.182900] env[69994]: DEBUG nova.virt.hardware [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 715.183209] env[69994]: DEBUG nova.virt.hardware [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 715.183342] env[69994]: DEBUG nova.virt.hardware [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 715.183515] env[69994]: DEBUG nova.virt.hardware [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be 
tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 715.183734] env[69994]: DEBUG nova.virt.hardware [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 715.183924] env[69994]: DEBUG nova.virt.hardware [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 715.185284] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5aa7f75-fb68-468d-88cf-81d7650ad699 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.199173] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2717234-a0a1-475e-9daf-cce898b6cf5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.287187] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925206, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.502986] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.529763] env[69994]: INFO nova.compute.manager [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Took 42.76 seconds to build instance. 
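For context on the repeated "Invoking ... with opID=oslo.vmware-..." / "Waiting for the task" / "completed successfully" triplets in these entries: they come from oslo.vmware's invoke-and-poll pattern, where a vSphere "*_Task" method is called through the API session and then polled until completion. Below is a minimal, hypothetical sketch of that pattern; the vCenter host, credentials, datacenter reference and datastore path are placeholders, and the 1048576 KB size simply mirrors the root-disk extensions logged above.

```python
# Sketch of the oslo.vmware invoke-and-poll pattern seen in the log:
# invoke a vSphere "*_Task" method, then poll it with wait_for_task()
# (the source of the "progress is N%" debug lines) until it finishes.
from oslo_vmware import api as vmware_api

# Placeholder endpoint and credentials; retry/poll settings abbreviated.
session = vmware_api.VMwareAPISession(
    'vc1.example.test',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)

# The VirtualDiskManager hangs off the service content, as in the
# ExtendVirtualDisk_Task invocations recorded above.
vdm = session.vim.service_content.virtualDiskManager
dc_ref = None  # a real Datacenter managed object reference would go here

task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task', vdm,
    name='[datastore1] example-instance/example-instance.vmdk',  # placeholder
    datacenter=dc_ref,
    newCapacityKb=1048576,  # 1 GiB root disk, matching the log entries
    eagerZero=False)

# Blocks until the task completes (raising on error); this is the call
# behind the "Waiting for the task" / "completed successfully" lines.
session.wait_for_task(task)
```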
[ 715.691661] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Acquiring lock "e87e1839-9fef-462d-b1ab-842ef76828a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.691661] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lock "e87e1839-9fef-462d-b1ab-842ef76828a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.691661] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Acquiring lock "e87e1839-9fef-462d-b1ab-842ef76828a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.691661] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lock "e87e1839-9fef-462d-b1ab-842ef76828a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.691661] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lock "e87e1839-9fef-462d-b1ab-842ef76828a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.692442] env[69994]: INFO nova.compute.manager [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Terminating instance [ 715.753272] env[69994]: DEBUG oslo_concurrency.lockutils [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.624s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.756839] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.462s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.757056] env[69994]: DEBUG nova.objects.instance [None 
req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Lazy-loading 'resources' on Instance uuid 9717f586-cedc-4f21-9ea6-7bf6e2991327 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 715.779369] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925206, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531038} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.779609] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] f36c29d1-b945-4afe-abbd-431e59de7cec/f36c29d1-b945-4afe-abbd-431e59de7cec.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 715.780359] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 715.780359] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f5047d52-deb7-4769-bbba-92c26ac77c77 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.783998] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "c512ee01-7d45-49f0-b2ce-659392527264" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.784317] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "c512ee01-7d45-49f0-b2ce-659392527264" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.784530] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "c512ee01-7d45-49f0-b2ce-659392527264-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.784728] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "c512ee01-7d45-49f0-b2ce-659392527264-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.784896] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "c512ee01-7d45-49f0-b2ce-659392527264-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.787035] env[69994]: INFO nova.compute.manager [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Terminating instance [ 715.792924] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 715.792924] env[69994]: value = "task-2925208" [ 715.792924] env[69994]: _type = "Task" [ 715.792924] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.805879] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925208, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.864407] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Acquiring lock "c06a2540-e77d-48c0-967f-94e2a53c4d8f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.866433] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Lock "c06a2540-e77d-48c0-967f-94e2a53c4d8f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.007601] env[69994]: DEBUG nova.compute.manager [req-1d836eb0-e955-475e-ae06-4fd1e3a53251 req-23605c36-1471-4486-8448-cc61549a9527 service nova] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Received event network-vif-plugged-7672d351-d6eb-466c-87d1-f7f798da34d4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 716.007825] env[69994]: DEBUG oslo_concurrency.lockutils [req-1d836eb0-e955-475e-ae06-4fd1e3a53251 req-23605c36-1471-4486-8448-cc61549a9527 service nova] Acquiring lock "2358d8f6-7fbc-4f30-93ad-27f4d96aefa7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.008061] env[69994]: DEBUG oslo_concurrency.lockutils [req-1d836eb0-e955-475e-ae06-4fd1e3a53251 
req-23605c36-1471-4486-8448-cc61549a9527 service nova] Lock "2358d8f6-7fbc-4f30-93ad-27f4d96aefa7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.008226] env[69994]: DEBUG oslo_concurrency.lockutils [req-1d836eb0-e955-475e-ae06-4fd1e3a53251 req-23605c36-1471-4486-8448-cc61549a9527 service nova] Lock "2358d8f6-7fbc-4f30-93ad-27f4d96aefa7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.008430] env[69994]: DEBUG nova.compute.manager [req-1d836eb0-e955-475e-ae06-4fd1e3a53251 req-23605c36-1471-4486-8448-cc61549a9527 service nova] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] No waiting events found dispatching network-vif-plugged-7672d351-d6eb-466c-87d1-f7f798da34d4 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 716.008634] env[69994]: WARNING nova.compute.manager [req-1d836eb0-e955-475e-ae06-4fd1e3a53251 req-23605c36-1471-4486-8448-cc61549a9527 service nova] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Received unexpected event network-vif-plugged-7672d351-d6eb-466c-87d1-f7f798da34d4 for instance with vm_state building and task_state spawning. [ 716.032464] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c48194cb-f494-4f6a-bef6-c42dd3455464 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "367665db-def4-4148-a316-b83378e00ba8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.773s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.187308] env[69994]: DEBUG nova.network.neutron [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Successfully updated port: 7672d351-d6eb-466c-87d1-f7f798da34d4 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 716.197142] env[69994]: DEBUG nova.compute.manager [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 716.197142] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 716.198076] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e1a862-945e-43f1-ad9b-796dfc112b93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.208952] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 716.209246] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a18675b-66db-4d98-baea-0f3d738cc728 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.216608] env[69994]: DEBUG oslo_vmware.api [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Waiting for the task: (returnval){ [ 716.216608] env[69994]: value = "task-2925209" [ 716.216608] env[69994]: _type = "Task" [ 716.216608] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.226432] env[69994]: DEBUG oslo_vmware.api [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925209, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.292782] env[69994]: DEBUG nova.compute.manager [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 716.292782] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 716.292932] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe53aee-25e1-4e90-b7d4-e51cd662bdda {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.307530] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925208, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069113} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.309891] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 716.310356] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 716.311142] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b877857-e91d-44e2-baac-973a17560eff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.313680] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-738e9663-3341-4d0a-b51a-020813c2524e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.319031] env[69994]: INFO nova.scheduler.client.report [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Deleted allocation for migration e07e0aa8-0c00-41d2-b1b6-07ee708e59ff [ 716.343135] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] f36c29d1-b945-4afe-abbd-431e59de7cec/f36c29d1-b945-4afe-abbd-431e59de7cec.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 716.350586] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47ce2aa6-0885-42f2-b828-148f23bea28d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.371325] env[69994]: DEBUG oslo_vmware.api [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 716.371325] env[69994]: value = "task-2925210" [ 716.371325] env[69994]: _type = "Task" [ 716.371325] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.380607] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 716.380607] env[69994]: value = "task-2925211" [ 716.380607] env[69994]: _type = "Task" [ 716.380607] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.384536] env[69994]: DEBUG oslo_vmware.api [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925210, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.403195] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925211, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.484832] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "db9f7abd-ab45-49a3-9035-695b26756142" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.485118] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "db9f7abd-ab45-49a3-9035-695b26756142" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.485615] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "db9f7abd-ab45-49a3-9035-695b26756142-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.485798] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "db9f7abd-ab45-49a3-9035-695b26756142-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.486119] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "db9f7abd-ab45-49a3-9035-695b26756142-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.491383] env[69994]: INFO nova.compute.manager [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Terminating instance [ 716.535318] env[69994]: DEBUG nova.compute.manager [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 
tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 716.647645] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "367665db-def4-4148-a316-b83378e00ba8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.647645] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "367665db-def4-4148-a316-b83378e00ba8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.647645] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "367665db-def4-4148-a316-b83378e00ba8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.647799] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "367665db-def4-4148-a316-b83378e00ba8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.647899] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "367665db-def4-4148-a316-b83378e00ba8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.650926] env[69994]: INFO nova.compute.manager [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Terminating instance [ 716.691573] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "refresh_cache-2358d8f6-7fbc-4f30-93ad-27f4d96aefa7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.691781] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "refresh_cache-2358d8f6-7fbc-4f30-93ad-27f4d96aefa7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
716.691978] env[69994]: DEBUG nova.network.neutron [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 716.727496] env[69994]: DEBUG oslo_vmware.api [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925209, 'name': PowerOffVM_Task, 'duration_secs': 0.22596} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.730124] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 716.730327] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 716.730728] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10a9e97d-eb64-424b-b81a-86c1e9851c3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.800483] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 716.800483] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 716.800702] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Deleting the datastore file [datastore1] e87e1839-9fef-462d-b1ab-842ef76828a4 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 716.801013] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55b8a4b0-4724-4b29-80f5-0e29cb4a99b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.813293] env[69994]: DEBUG oslo_vmware.api [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Waiting for the task: (returnval){ [ 716.813293] env[69994]: value = "task-2925213" [ 716.813293] env[69994]: _type = "Task" [ 716.813293] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.826230] env[69994]: DEBUG oslo_vmware.api [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925213, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.845904] env[69994]: DEBUG oslo_concurrency.lockutils [None req-53cdc0fe-26f0-4c28-94f5-8dfcad619f52 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 38.428s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.867034] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28606447-8f9c-4b47-b2bd-1124e0ba2b81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.881940] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68baa6f-c6d6-4a5a-bfe3-bf9acb3ce739 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.889250] env[69994]: DEBUG oslo_vmware.api [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925210, 'name': PowerOffVM_Task, 'duration_secs': 0.286834} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.892209] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 716.892459] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 716.893017] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cea36d8a-4782-461c-9c2f-d7fc03e3ff5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.930345] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af0bc9c-c21f-46f2-800d-8d6c2eac2c3f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.933546] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925211, 'name': ReconfigVM_Task, 'duration_secs': 0.331304} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.933821] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Reconfigured VM instance instance-0000001a to attach disk [datastore2] f36c29d1-b945-4afe-abbd-431e59de7cec/f36c29d1-b945-4afe-abbd-431e59de7cec.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 716.935208] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-878b32cc-fc50-4f8a-8ba2-d62701f10c05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.942089] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6617e1-f4d6-4eae-b530-1431a2f834b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.948028] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 716.948028] env[69994]: value = "task-2925215" [ 716.948028] env[69994]: _type = "Task" [ 716.948028] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.959343] env[69994]: DEBUG nova.compute.provider_tree [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.967325] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925215, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.997384] env[69994]: DEBUG nova.compute.manager [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 716.997384] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 716.997975] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81889177-74d0-4e55-86ac-dcf1841612fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.006776] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 717.006960] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f292932-d5a3-4741-bf08-76ab433e45c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.014971] env[69994]: DEBUG oslo_vmware.api [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 717.014971] env[69994]: value = "task-2925216" [ 717.014971] env[69994]: _type = "Task" [ 717.014971] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.029025] env[69994]: DEBUG oslo_vmware.api [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925216, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.057880] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.065660] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 717.066116] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 717.066116] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Deleting the datastore file [datastore1] c512ee01-7d45-49f0-b2ce-659392527264 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 717.066384] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02daa9b1-447a-406c-a742-798506b9ad64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.075531] env[69994]: DEBUG oslo_vmware.api [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 717.075531] env[69994]: value = "task-2925217" [ 717.075531] env[69994]: _type = "Task" [ 717.075531] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.084347] env[69994]: DEBUG oslo_vmware.api [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925217, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.157291] env[69994]: DEBUG nova.compute.manager [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 717.157533] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 717.158700] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7aa340-3118-4e93-8c18-236aec12ee54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.168694] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 717.169119] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47533b6a-50b2-4603-840a-9df1c0c49211 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.179654] env[69994]: DEBUG oslo_vmware.api [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 717.179654] env[69994]: value = "task-2925219" [ 717.179654] env[69994]: _type = "Task" [ 717.179654] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.195086] env[69994]: DEBUG oslo_vmware.api [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925219, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.243519] env[69994]: DEBUG nova.network.neutron [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.324546] env[69994]: DEBUG oslo_vmware.api [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Task: {'id': task-2925213, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.302346} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.324839] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 717.325293] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 717.325771] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 717.326162] env[69994]: INFO nova.compute.manager [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Took 1.13 seconds to destroy the instance on the hypervisor. [ 717.327059] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 717.327324] env[69994]: DEBUG nova.compute.manager [-] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 717.327410] env[69994]: DEBUG nova.network.neutron [-] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 717.442642] env[69994]: DEBUG nova.network.neutron [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Updating instance_info_cache with network_info: [{"id": "7672d351-d6eb-466c-87d1-f7f798da34d4", "address": "fa:16:3e:8f:3d:8d", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7672d351-d6", "ovs_interfaceid": "7672d351-d6eb-466c-87d1-f7f798da34d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.463370] env[69994]: DEBUG nova.scheduler.client.report [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 717.468332] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925215, 'name': Rename_Task, 'duration_secs': 0.166215} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.469312] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 717.469944] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3e02d69-fdc9-4873-b776-c3765d740d5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.480131] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 717.480131] env[69994]: value = "task-2925220" [ 717.480131] env[69994]: _type = "Task" [ 717.480131] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.491711] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925220, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.529021] env[69994]: DEBUG oslo_vmware.api [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925216, 'name': PowerOffVM_Task, 'duration_secs': 0.222679} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.529529] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 717.529787] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 717.530190] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4688ed15-ff11-474e-b5e2-08aa325bcfdc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.587118] env[69994]: DEBUG oslo_vmware.api [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.232808} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.587435] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 717.587595] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 717.587773] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 717.588243] env[69994]: INFO nova.compute.manager [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Took 1.30 seconds to destroy the instance on the hypervisor. [ 717.588533] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 717.588736] env[69994]: DEBUG nova.compute.manager [-] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 717.588837] env[69994]: DEBUG nova.network.neutron [-] [instance: c512ee01-7d45-49f0-b2ce-659392527264] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 717.602854] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 717.602995] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 717.603217] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Deleting the datastore file [datastore1] db9f7abd-ab45-49a3-9035-695b26756142 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 717.603646] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task 
with opID=oslo.vmware-38d15763-5f55-46ed-9a5d-d3f816de6b95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.611997] env[69994]: DEBUG oslo_vmware.api [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 717.611997] env[69994]: value = "task-2925222" [ 717.611997] env[69994]: _type = "Task" [ 717.611997] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.624173] env[69994]: DEBUG oslo_vmware.api [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925222, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.694105] env[69994]: DEBUG oslo_vmware.api [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925219, 'name': PowerOffVM_Task, 'duration_secs': 0.245876} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.694532] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 717.696180] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 717.696495] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d31414c1-f507-430b-a999-bfa2fa39a5bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.779053] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 717.779053] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 717.779053] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Deleting the datastore file [datastore1] 367665db-def4-4148-a316-b83378e00ba8 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 717.779053] env[69994]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c481be4d-69a8-4d14-9938-1e748443878c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.789659] env[69994]: DEBUG oslo_vmware.api [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 717.789659] env[69994]: value = "task-2925224" [ 717.789659] env[69994]: _type = "Task" [ 717.789659] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.800786] env[69994]: DEBUG oslo_vmware.api [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925224, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.944035] env[69994]: DEBUG nova.compute.manager [req-71adeea6-abc7-4521-aa05-cbf42b47ac7b req-357eadbb-ad19-4b14-b0aa-84566a563815 service nova] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Received event network-vif-deleted-7f9115c8-be0b-4607-b9e6-69371f8cef21 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 717.944764] env[69994]: INFO nova.compute.manager [req-71adeea6-abc7-4521-aa05-cbf42b47ac7b req-357eadbb-ad19-4b14-b0aa-84566a563815 service nova] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Neutron deleted interface 7f9115c8-be0b-4607-b9e6-69371f8cef21; detaching it from the instance and deleting it from the info cache [ 717.944764] env[69994]: DEBUG nova.network.neutron [req-71adeea6-abc7-4521-aa05-cbf42b47ac7b req-357eadbb-ad19-4b14-b0aa-84566a563815 service nova] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.947181] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "refresh_cache-2358d8f6-7fbc-4f30-93ad-27f4d96aefa7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.947512] env[69994]: DEBUG nova.compute.manager [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Instance network_info: |[{"id": "7672d351-d6eb-466c-87d1-f7f798da34d4", "address": "fa:16:3e:8f:3d:8d", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", 
"external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7672d351-d6", "ovs_interfaceid": "7672d351-d6eb-466c-87d1-f7f798da34d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 717.947958] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:3d:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7672d351-d6eb-466c-87d1-f7f798da34d4', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 717.959066] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 717.960673] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 717.960852] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db410d0d-3f87-44b9-8b85-998d9b850800 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.980239] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.223s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.982669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.246s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.982942] env[69994]: DEBUG nova.objects.instance [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lazy-loading 'resources' on Instance uuid 803e9885-000f-4696-9fb9-03361ef46538 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 717.992604] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 717.992604] env[69994]: value = "task-2925225" [ 717.992604] env[69994]: _type = "Task" [ 717.992604] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.999018] env[69994]: DEBUG oslo_vmware.api [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925220, 'name': PowerOnVM_Task, 'duration_secs': 0.512124} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.999873] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 718.000090] env[69994]: INFO nova.compute.manager [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Took 5.50 seconds to spawn the instance on the hypervisor. [ 718.000268] env[69994]: DEBUG nova.compute.manager [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 718.001620] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18763a8a-df3c-4497-b69c-9716cf299e16 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.014115] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925225, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.018269] env[69994]: INFO nova.scheduler.client.report [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Deleted allocations for instance 9717f586-cedc-4f21-9ea6-7bf6e2991327 [ 718.124716] env[69994]: DEBUG oslo_vmware.api [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925222, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180716} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.125118] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 718.125307] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 718.125591] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 718.125740] env[69994]: INFO nova.compute.manager [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Took 1.13 seconds to destroy the instance on the hypervisor. [ 718.126910] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 718.126910] env[69994]: DEBUG nova.compute.manager [-] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 718.126910] env[69994]: DEBUG nova.network.neutron [-] [instance: db9f7abd-ab45-49a3-9035-695b26756142] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 718.222241] env[69994]: DEBUG nova.compute.manager [req-582c17de-80c4-4551-be5a-74b3c448eda1 req-ce9b73dd-f0a9-4e18-bb57-5354e9eac4ad service nova] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Received event network-changed-7672d351-d6eb-466c-87d1-f7f798da34d4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 718.222496] env[69994]: DEBUG nova.compute.manager [req-582c17de-80c4-4551-be5a-74b3c448eda1 req-ce9b73dd-f0a9-4e18-bb57-5354e9eac4ad service nova] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Refreshing instance network info cache due to event network-changed-7672d351-d6eb-466c-87d1-f7f798da34d4. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 718.222878] env[69994]: DEBUG oslo_concurrency.lockutils [req-582c17de-80c4-4551-be5a-74b3c448eda1 req-ce9b73dd-f0a9-4e18-bb57-5354e9eac4ad service nova] Acquiring lock "refresh_cache-2358d8f6-7fbc-4f30-93ad-27f4d96aefa7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.222983] env[69994]: DEBUG oslo_concurrency.lockutils [req-582c17de-80c4-4551-be5a-74b3c448eda1 req-ce9b73dd-f0a9-4e18-bb57-5354e9eac4ad service nova] Acquired lock "refresh_cache-2358d8f6-7fbc-4f30-93ad-27f4d96aefa7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.223165] env[69994]: DEBUG nova.network.neutron [req-582c17de-80c4-4551-be5a-74b3c448eda1 req-ce9b73dd-f0a9-4e18-bb57-5354e9eac4ad service nova] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Refreshing network info cache for port 7672d351-d6eb-466c-87d1-f7f798da34d4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 718.260429] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.260599] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.267514] env[69994]: DEBUG nova.network.neutron [-] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.304175] env[69994]: DEBUG oslo_vmware.api [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925224, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160811} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.304539] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 718.304640] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 718.304788] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 718.305084] env[69994]: INFO nova.compute.manager [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 367665db-def4-4148-a316-b83378e00ba8] Took 1.15 seconds to destroy the instance on the hypervisor. [ 718.305247] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 718.306062] env[69994]: DEBUG nova.compute.manager [-] [instance: 367665db-def4-4148-a316-b83378e00ba8] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 718.306062] env[69994]: DEBUG nova.network.neutron [-] [instance: 367665db-def4-4148-a316-b83378e00ba8] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 718.447902] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d40549e7-c4dd-4c65-98de-1d90682c9217 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.459947] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49d8976a-d886-4633-a134-e90a6b2d5134 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.501385] env[69994]: DEBUG nova.compute.manager [req-71adeea6-abc7-4521-aa05-cbf42b47ac7b req-357eadbb-ad19-4b14-b0aa-84566a563815 service nova] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Detach interface failed, port_id=7f9115c8-be0b-4607-b9e6-69371f8cef21, reason: Instance e87e1839-9fef-462d-b1ab-842ef76828a4 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 718.510917] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925225, 'name': CreateVM_Task, 'duration_secs': 0.439719} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.514669] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 718.515445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.515445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.515445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 718.515805] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bada7f80-26cb-42d3-b6e9-0225ebae8b81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.521241] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 718.521241] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52110101-7992-1942-8be3-4559b22a4eaa" [ 718.521241] env[69994]: _type = "Task" [ 718.521241] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.537605] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb82c970-3b2d-4c72-b70d-af388e3ad9a4 tempest-InstanceActionsNegativeTestJSON-237685100 tempest-InstanceActionsNegativeTestJSON-237685100-project-member] Lock "9717f586-cedc-4f21-9ea6-7bf6e2991327" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.031s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.540787] env[69994]: INFO nova.compute.manager [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Took 39.97 seconds to build instance. [ 718.545600] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52110101-7992-1942-8be3-4559b22a4eaa, 'name': SearchDatastore_Task, 'duration_secs': 0.010683} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.546084] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.546400] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.546509] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.546821] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.546821] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 718.547086] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30877f5b-cc19-4b36-ac04-8e9cea7e2343 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.557747] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 718.558020] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 718.559255] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-911ccf45-5d4a-46ae-91b2-a2b0043afe52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.565885] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 718.565885] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52699223-cb6d-7ddc-960d-11ece1378e01" [ 718.565885] env[69994]: _type = "Task" [ 718.565885] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.578993] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52699223-cb6d-7ddc-960d-11ece1378e01, 'name': SearchDatastore_Task, 'duration_secs': 0.009716} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.579793] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3640dc60-b9dc-4aae-8b8c-66ce63cccb01 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.585230] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 718.585230] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52216116-d923-e7e1-6ac5-8614e9d7c848" [ 718.585230] env[69994]: _type = "Task" [ 718.585230] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.598969] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52216116-d923-e7e1-6ac5-8614e9d7c848, 'name': SearchDatastore_Task, 'duration_secs': 0.009966} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.598969] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.598969] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7/2358d8f6-7fbc-4f30-93ad-27f4d96aefa7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 718.598969] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ad7e9c4-71b0-4577-920b-8867f8ed935c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.608012] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 718.608012] env[69994]: value = "task-2925226" [ 718.608012] env[69994]: _type = "Task" [ 718.608012] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.616808] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925226, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.775922] env[69994]: INFO nova.compute.manager [-] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Took 1.45 seconds to deallocate network for instance. [ 719.013792] env[69994]: DEBUG nova.network.neutron [-] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.018972] env[69994]: DEBUG nova.network.neutron [-] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.044378] env[69994]: DEBUG nova.network.neutron [req-582c17de-80c4-4551-be5a-74b3c448eda1 req-ce9b73dd-f0a9-4e18-bb57-5354e9eac4ad service nova] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Updated VIF entry in instance network info cache for port 7672d351-d6eb-466c-87d1-f7f798da34d4. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 719.044772] env[69994]: DEBUG nova.network.neutron [req-582c17de-80c4-4551-be5a-74b3c448eda1 req-ce9b73dd-f0a9-4e18-bb57-5354e9eac4ad service nova] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Updating instance_info_cache with network_info: [{"id": "7672d351-d6eb-466c-87d1-f7f798da34d4", "address": "fa:16:3e:8f:3d:8d", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7672d351-d6", "ovs_interfaceid": "7672d351-d6eb-466c-87d1-f7f798da34d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.047907] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c5dda00-6204-493d-90cf-64b4c388c42b tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Lock "f36c29d1-b945-4afe-abbd-431e59de7cec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 60.203s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.069774] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10609e84-edea-493d-90e7-5cf3b3fded9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.079741] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a448e2e-dc71-4198-8c97-803bfcccbd5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.123048] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809ed4df-0915-4b3b-b141-5f0cd2cb9f7f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.128548] env[69994]: DEBUG nova.network.neutron [-] [instance: 367665db-def4-4148-a316-b83378e00ba8] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.136019] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15155cc-ff02-49e0-9797-bcf789b629e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.137953] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be 
tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925226, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481229} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.139329] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7/2358d8f6-7fbc-4f30-93ad-27f4d96aefa7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 719.139329] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 719.139958] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b1347cfe-65b6-41a0-a662-a8e944e4a238 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.150918] env[69994]: DEBUG nova.compute.provider_tree [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.154767] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 719.154767] env[69994]: value = "task-2925227" [ 719.154767] env[69994]: _type = "Task" [ 719.154767] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.165157] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925227, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.285849] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.518389] env[69994]: INFO nova.compute.manager [-] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Took 1.39 seconds to deallocate network for instance. [ 719.522230] env[69994]: INFO nova.compute.manager [-] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Took 1.93 seconds to deallocate network for instance. 
[ 719.552717] env[69994]: DEBUG oslo_concurrency.lockutils [req-582c17de-80c4-4551-be5a-74b3c448eda1 req-ce9b73dd-f0a9-4e18-bb57-5354e9eac4ad service nova] Releasing lock "refresh_cache-2358d8f6-7fbc-4f30-93ad-27f4d96aefa7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 719.553016] env[69994]: DEBUG nova.compute.manager [req-582c17de-80c4-4551-be5a-74b3c448eda1 req-ce9b73dd-f0a9-4e18-bb57-5354e9eac4ad service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Received event network-vif-deleted-baab9a7d-282b-4491-baef-b768fcba09be {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 719.553225] env[69994]: INFO nova.compute.manager [req-582c17de-80c4-4551-be5a-74b3c448eda1 req-ce9b73dd-f0a9-4e18-bb57-5354e9eac4ad service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Neutron deleted interface baab9a7d-282b-4491-baef-b768fcba09be; detaching it from the instance and deleting it from the info cache [ 719.554483] env[69994]: DEBUG nova.network.neutron [req-582c17de-80c4-4551-be5a-74b3c448eda1 req-ce9b73dd-f0a9-4e18-bb57-5354e9eac4ad service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Updating instance_info_cache with network_info: [{"id": "8ba05e4e-7501-48a5-9844-c7f0fc30072c", "address": "fa:16:3e:97:da:bf", "network": {"id": "f3ea56f2-4504-432f-8c1d-ed3f45470639", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1891970213", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ba05e4e-75", "ovs_interfaceid": "8ba05e4e-7501-48a5-9844-c7f0fc30072c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7", "address": "fa:16:3e:52:aa:68", "network": {"id": "ed4b6a9e-2943-4332-b135-498405bc2cf0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2027309304", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.228", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ccd0f30-a9", "ovs_interfaceid": "1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.557573] env[69994]: DEBUG nova.compute.manager [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 719.639538] env[69994]: INFO nova.compute.manager [-] [instance: 367665db-def4-4148-a316-b83378e00ba8] Took 1.33 seconds to deallocate network for instance. [ 719.656944] env[69994]: DEBUG nova.scheduler.client.report [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 719.673428] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925227, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120107} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.673710] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 719.674559] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85299542-7444-498f-841b-46784aa88a62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.700331] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7/2358d8f6-7fbc-4f30-93ad-27f4d96aefa7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 719.700958] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40306ec4-38ba-427d-92c0-2d26d084adca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.722226] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 719.722226] env[69994]: value = "task-2925229" [ 719.722226] env[69994]: _type = 
"Task" [ 719.722226] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.733043] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925229, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.992205] env[69994]: DEBUG nova.compute.manager [req-eaedfac6-5606-4b0c-add3-83a0aafd9f57 req-ddd7a101-3938-4e8c-81d8-ad06f6a4559d service nova] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Received event network-vif-deleted-e7eb12ca-e58a-4b85-acd5-5b8d14209edc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 719.992440] env[69994]: DEBUG nova.compute.manager [req-eaedfac6-5606-4b0c-add3-83a0aafd9f57 req-ddd7a101-3938-4e8c-81d8-ad06f6a4559d service nova] [instance: 367665db-def4-4148-a316-b83378e00ba8] Received event network-vif-deleted-34344b41-5493-4a10-b542-c94483d0abfb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 720.032991] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.033890] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.056698] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94dbb532-ab70-4190-abed-15c76d653da7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.071774] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9945093-b7d6-4426-bfab-e392c66285ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.113921] env[69994]: DEBUG nova.compute.manager [req-582c17de-80c4-4551-be5a-74b3c448eda1 req-ce9b73dd-f0a9-4e18-bb57-5354e9eac4ad service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Detach interface failed, port_id=baab9a7d-282b-4491-baef-b768fcba09be, reason: Instance c512ee01-7d45-49f0-b2ce-659392527264 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 720.116139] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.146815] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.168518] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.185s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.171502] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.048s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.173620] env[69994]: INFO nova.compute.claims [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 720.189112] env[69994]: INFO nova.scheduler.client.report [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Deleted allocations for instance 803e9885-000f-4696-9fb9-03361ef46538 [ 720.233615] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925229, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.269352] env[69994]: DEBUG nova.compute.manager [req-058cb03b-aeb3-44cd-b435-b207c907a983 req-07e327d4-ed2e-4ebe-b50f-b23b5b2d3312 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Received event network-vif-deleted-8ba05e4e-7501-48a5-9844-c7f0fc30072c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 720.269460] env[69994]: INFO nova.compute.manager [req-058cb03b-aeb3-44cd-b435-b207c907a983 req-07e327d4-ed2e-4ebe-b50f-b23b5b2d3312 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Neutron deleted interface 8ba05e4e-7501-48a5-9844-c7f0fc30072c; detaching it from the instance and deleting it from the info cache [ 720.269714] env[69994]: DEBUG nova.network.neutron [req-058cb03b-aeb3-44cd-b435-b207c907a983 req-07e327d4-ed2e-4ebe-b50f-b23b5b2d3312 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Updating instance_info_cache with network_info: [{"id": "1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7", "address": "fa:16:3e:52:aa:68", "network": {"id": "ed4b6a9e-2943-4332-b135-498405bc2cf0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2027309304", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.228", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ccd0f30-a9", "ovs_interfaceid": "1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.592382] env[69994]: INFO nova.compute.manager [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Rebuilding instance [ 720.652332] env[69994]: DEBUG nova.compute.manager [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 720.653758] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58d6bf4-809d-4551-8526-0f41835d9b2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.698492] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95e0612a-cba9-4d0d-bd13-c2a3b787ce16 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "803e9885-000f-4696-9fb9-03361ef46538" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 
33.624s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.734390] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925229, 'name': ReconfigVM_Task, 'duration_secs': 0.832279} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.734711] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Reconfigured VM instance instance-0000001b to attach disk [datastore2] 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7/2358d8f6-7fbc-4f30-93ad-27f4d96aefa7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 720.735420] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2bf1d8e4-3b92-4786-b441-e7eac7fcc9e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.744515] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 720.744515] env[69994]: value = "task-2925230" [ 720.744515] env[69994]: _type = "Task" [ 720.744515] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.756072] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925230, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.773024] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-def750c8-d8b5-4666-a27d-f4dab1915787 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.786272] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e37b15-c4ee-4447-b77e-209ed27250c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.822107] env[69994]: DEBUG nova.compute.manager [req-058cb03b-aeb3-44cd-b435-b207c907a983 req-07e327d4-ed2e-4ebe-b50f-b23b5b2d3312 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Detach interface failed, port_id=8ba05e4e-7501-48a5-9844-c7f0fc30072c, reason: Instance c512ee01-7d45-49f0-b2ce-659392527264 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 720.822432] env[69994]: DEBUG nova.compute.manager [req-058cb03b-aeb3-44cd-b435-b207c907a983 req-07e327d4-ed2e-4ebe-b50f-b23b5b2d3312 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Received event network-vif-deleted-1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 720.822635] env[69994]: INFO nova.compute.manager [req-058cb03b-aeb3-44cd-b435-b207c907a983 req-07e327d4-ed2e-4ebe-b50f-b23b5b2d3312 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Neutron deleted interface 1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7; detaching it from the instance and deleting it from the info cache [ 720.822834] env[69994]: DEBUG nova.network.neutron [req-058cb03b-aeb3-44cd-b435-b207c907a983 req-07e327d4-ed2e-4ebe-b50f-b23b5b2d3312 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.256157] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925230, 'name': Rename_Task, 'duration_secs': 0.235094} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.259037] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 721.259654] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a43ade7-2ec2-4b2c-96b9-8ecbb6d1c095 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.271782] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 721.271782] env[69994]: value = "task-2925232" [ 721.271782] env[69994]: _type = "Task" [ 721.271782] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.288507] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925232, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.331714] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b820c36-3b8a-4c10-864d-f8f25ef8b9a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.344984] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d54430-8f25-467c-8fd5-7f84d3575a2b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.391701] env[69994]: DEBUG nova.compute.manager [req-058cb03b-aeb3-44cd-b435-b207c907a983 req-07e327d4-ed2e-4ebe-b50f-b23b5b2d3312 service nova] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Detach interface failed, port_id=1ccd0f30-a96d-4ec8-a5d7-f0267cd449d7, reason: Instance c512ee01-7d45-49f0-b2ce-659392527264 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 721.671161] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 721.671635] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7190b8ee-fb4d-4390-925c-4f66b69cb34d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.681872] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 721.681872] env[69994]: value = "task-2925233" [ 721.681872] env[69994]: _type = "Task" [ 721.681872] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.693044] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925233, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.725201] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87fdb199-f47f-4c3f-8c5d-eafdab23cea8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.733483] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a231824a-0cbb-4a9e-9ee4-4eb908d6b646 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.765799] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753fbfa6-6a04-4b25-93bf-1856d98ae1ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.778384] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d164205d-341e-462a-a3b4-829ebd827f58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.798553] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925232, 'name': PowerOnVM_Task} progress is 79%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.799103] env[69994]: DEBUG nova.compute.provider_tree [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.192063] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925233, 'name': PowerOffVM_Task, 'duration_secs': 0.124489} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.192358] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 722.193054] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 722.193798] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc856bd-d20c-4012-94bb-56f01d9e4f53 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.200866] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 722.201114] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9239d2ec-5da3-4de5-bf6f-b3a720fe186e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.231647] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 722.231898] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 722.232078] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Deleting the datastore file [datastore2] f36c29d1-b945-4afe-abbd-431e59de7cec {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 722.232523] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11117cff-8516-4634-ac65-d9e87566944b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.239888] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 722.239888] env[69994]: value = "task-2925235" [ 722.239888] env[69994]: _type = "Task" [ 722.239888] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.248058] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925235, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.283981] env[69994]: DEBUG oslo_vmware.api [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925232, 'name': PowerOnVM_Task, 'duration_secs': 0.717035} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.284722] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 722.284722] env[69994]: INFO nova.compute.manager [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Took 7.13 seconds to spawn the instance on the hypervisor. [ 722.284722] env[69994]: DEBUG nova.compute.manager [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 722.285521] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1d968e-cf17-49ba-9cc2-b612fa0e44dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.303733] env[69994]: DEBUG nova.scheduler.client.report [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 722.667233] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "9269e42b-b05c-4c88-9008-aaeda4b0248f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.667812] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 
tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "9269e42b-b05c-4c88-9008-aaeda4b0248f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.755153] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135626} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.755153] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 722.755153] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 722.755153] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 722.805799] env[69994]: INFO nova.compute.manager [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Took 42.65 seconds to build instance. [ 722.811455] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.640s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.815022] env[69994]: DEBUG nova.compute.manager [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 722.817135] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.024s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.818753] env[69994]: INFO nova.compute.claims [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.311596] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eab53b02-8cc7-46dc-bd5a-0a610653e3be tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "2358d8f6-7fbc-4f30-93ad-27f4d96aefa7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 63.805s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.328283] env[69994]: DEBUG nova.compute.utils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 723.329907] env[69994]: DEBUG nova.compute.manager [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Not allocating networking since 'none' was specified. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 723.790226] env[69994]: DEBUG nova.virt.hardware [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 723.790507] env[69994]: DEBUG nova.virt.hardware [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 723.790507] env[69994]: DEBUG nova.virt.hardware [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 723.790617] env[69994]: DEBUG nova.virt.hardware [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 723.791608] env[69994]: DEBUG nova.virt.hardware [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 723.791608] env[69994]: DEBUG nova.virt.hardware [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 723.791608] env[69994]: DEBUG nova.virt.hardware [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 723.791608] env[69994]: DEBUG nova.virt.hardware [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 723.791608] env[69994]: DEBUG nova.virt.hardware [None 
req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 723.791608] env[69994]: DEBUG nova.virt.hardware [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 723.791986] env[69994]: DEBUG nova.virt.hardware [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 723.792796] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca5ab1c-d7a9-4e21-896d-c312221a57f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.801752] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef59042-c104-4bc0-ae9b-8ebaf10e6715 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.816560] env[69994]: DEBUG nova.compute.manager [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 723.819254] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 723.826613] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 723.826924] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 723.827160] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5b148d3-d2d9-4402-a8d7-8c26419a7e37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.841902] env[69994]: DEBUG nova.compute.manager [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 723.851029] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 723.851029] env[69994]: value = "task-2925237" [ 723.851029] env[69994]: _type = "Task" [ 723.851029] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.860357] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925237, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.288279] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Acquiring lock "3c814c83-20cc-4871-9f30-5c0c7d99b8a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.288504] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Lock "3c814c83-20cc-4871-9f30-5c0c7d99b8a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.356612] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.373890] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925237, 'name': CreateVM_Task, 'duration_secs': 0.343249} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.374163] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 724.375116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.375116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.375431] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 724.379777] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a81bce57-3706-46d4-a326-b329b8eca407 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.385202] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 724.385202] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523f1f48-41f3-eb68-b6ec-04beda1c40c6" [ 724.385202] env[69994]: _type = "Task" [ 724.385202] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.394402] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523f1f48-41f3-eb68-b6ec-04beda1c40c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.398271] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724bb0f5-2c3f-42e1-8aa3-43a6aa55837e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.406787] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5902e8d4-d19b-4b15-ac05-921693a02380 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.438739] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b01d5a-fb4a-4a42-b0b5-b68ffb3fff9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.448109] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52425cc8-9d50-4bfc-89f5-d4c47c990bd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.462756] env[69994]: DEBUG nova.compute.provider_tree [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.638736] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "956306bc-4701-4c04-8221-8ec0b9df73ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.639008] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "956306bc-4701-4c04-8221-8ec0b9df73ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.865910] env[69994]: DEBUG nova.compute.manager [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 724.896330] env[69994]: DEBUG nova.virt.hardware [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 724.896704] env[69994]: DEBUG nova.virt.hardware [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.898439] env[69994]: DEBUG nova.virt.hardware [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 724.898439] env[69994]: DEBUG nova.virt.hardware [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.898439] env[69994]: DEBUG nova.virt.hardware [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 724.898439] env[69994]: DEBUG nova.virt.hardware [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 724.898439] env[69994]: DEBUG nova.virt.hardware [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 724.898439] env[69994]: DEBUG nova.virt.hardware [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 724.898439] env[69994]: DEBUG nova.virt.hardware [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 
tempest-ServerShowV254Test-385686127-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 724.898439] env[69994]: DEBUG nova.virt.hardware [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 724.898439] env[69994]: DEBUG nova.virt.hardware [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 724.899399] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737c04e0-1c3c-4e2f-bbad-26c9458f56f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.906396] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523f1f48-41f3-eb68-b6ec-04beda1c40c6, 'name': SearchDatastore_Task, 'duration_secs': 0.011408} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.907089] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.907320] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 724.907551] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.907692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.907863] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 724.908137] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca790a61-7cb1-4420-9a71-918e9ffc67cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.913276] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2fbacd-cf7c-43c1-aaf7-7deef182ae82 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.919254] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 724.919414] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 724.920939] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-741f13c2-b6e8-459d-8945-5371bb675d7e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.931192] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 724.936896] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Creating folder: Project (536407139c594cb8841d589b7ead89a4). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 724.937647] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07004280-1084-4e26-8290-983c80ce3dd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.940982] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 724.940982] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5209fdfd-a445-ccdd-2d3f-cd228357b8c9" [ 724.940982] env[69994]: _type = "Task" [ 724.940982] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.949941] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5209fdfd-a445-ccdd-2d3f-cd228357b8c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.951305] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Created folder: Project (536407139c594cb8841d589b7ead89a4) in parent group-v587342. [ 724.951490] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Creating folder: Instances. Parent ref: group-v587433. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 724.951812] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77a9f75f-7c30-4f01-ada6-7179857d5554 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.961441] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Created folder: Instances in parent group-v587433. [ 724.961681] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 724.961938] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 724.962116] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-553d4a6d-6d6f-453e-9a21-2d3524a375f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.974269] env[69994]: DEBUG nova.scheduler.client.report [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 724.982520] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 724.982520] env[69994]: value = "task-2925240" [ 724.982520] env[69994]: _type = "Task" [ 724.982520] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.992779] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925240, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.451257] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5209fdfd-a445-ccdd-2d3f-cd228357b8c9, 'name': SearchDatastore_Task, 'duration_secs': 0.011227} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.452078] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cf02bc9-ead4-484a-b44b-f5ebb0b79b42 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.459024] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 725.459024] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520fef2d-27e4-68e5-a112-98769fde4268" [ 725.459024] env[69994]: _type = "Task" [ 725.459024] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.466131] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520fef2d-27e4-68e5-a112-98769fde4268, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.479217] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.665s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.479726] env[69994]: DEBUG nova.compute.manager [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 725.482451] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.896s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.483759] env[69994]: INFO nova.compute.claims [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 725.495485] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925240, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.968467] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520fef2d-27e4-68e5-a112-98769fde4268, 'name': SearchDatastore_Task, 'duration_secs': 0.010588} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.968782] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.968989] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] f36c29d1-b945-4afe-abbd-431e59de7cec/f36c29d1-b945-4afe-abbd-431e59de7cec.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 725.969287] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d73337b9-c73e-438a-9c62-0305612ae941 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.977630] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 725.977630] env[69994]: value = "task-2925241" [ 725.977630] env[69994]: _type = "Task" [ 725.977630] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.988631] env[69994]: DEBUG nova.compute.utils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 725.993329] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925241, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.993329] env[69994]: DEBUG nova.compute.manager [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 725.993329] env[69994]: DEBUG nova.network.neutron [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 726.002827] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925240, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.047597] env[69994]: DEBUG nova.policy [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c345e7849e994e38b9b5a050255115a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3185ee244cc74a8896b062af9d4e1478', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 726.473386] env[69994]: DEBUG nova.network.neutron [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Successfully created port: c8e0f8d2-cf57-4669-8fe4-3fec8b7232df {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 726.488431] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925241, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.439078} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.488780] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] f36c29d1-b945-4afe-abbd-431e59de7cec/f36c29d1-b945-4afe-abbd-431e59de7cec.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 726.489025] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 726.489302] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-177a72c5-0f0a-421e-9745-39f13a928923 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.492968] env[69994]: DEBUG nova.compute.manager [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 726.504266] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 726.504266] env[69994]: value = "task-2925242" [ 726.504266] env[69994]: _type = "Task" [ 726.504266] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.514129] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925240, 'name': CreateVM_Task, 'duration_secs': 1.333136} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.514868] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 726.515465] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.516811] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.517428] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 726.521065] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6be95f59-553c-4eb5-a28a-6fbf303c4ff2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.523998] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925242, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.532432] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 726.532432] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b9d478-14f7-a2bd-5021-567c4e393972" [ 726.532432] env[69994]: _type = "Task" [ 726.532432] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.543572] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b9d478-14f7-a2bd-5021-567c4e393972, 'name': SearchDatastore_Task, 'duration_secs': 0.011045} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.547622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.548031] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 726.548386] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.548665] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.548960] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.549542] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3bcb8cb-4e05-479a-a43f-89a6dac1e884 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.562297] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.562297] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 726.563287] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbfcd88e-8e4a-4a58-a087-3ec0eb73df31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.572574] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 726.572574] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528fd3b4-3378-7325-3bc9-198e2d270bde" [ 726.572574] env[69994]: _type = "Task" [ 726.572574] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.583476] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528fd3b4-3378-7325-3bc9-198e2d270bde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.003859] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113ff74a-5a04-41a3-bc72-e5c5c6186bc5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.026590] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925242, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064549} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.028555] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 727.029426] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72dcf802-1f6a-4044-a8b7-21bd163871c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.032843] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7475679d-e0dd-449a-8127-581e75223255 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.054997] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] f36c29d1-b945-4afe-abbd-431e59de7cec/f36c29d1-b945-4afe-abbd-431e59de7cec.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 727.083780] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c5dbaa0-fd8d-40a3-a871-22bb832f6482 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.102672] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec41b0c-f678-4701-a9bd-0def42fb473e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.111929] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528fd3b4-3378-7325-3bc9-198e2d270bde, 'name': SearchDatastore_Task, 'duration_secs': 0.010175} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.115664] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 727.115664] env[69994]: value = "task-2925243" [ 727.115664] env[69994]: _type = "Task" [ 727.115664] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.116317] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-696fe925-4ea7-4255-b4cc-7398f7f316fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.119142] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f7f071-ec3a-418e-8e5d-01962f081d23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.135293] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925243, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.142131] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 727.142131] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52107776-8bde-96e6-e3c0-f99cfd41e819" [ 727.142131] env[69994]: _type = "Task" [ 727.142131] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.142691] env[69994]: DEBUG nova.compute.provider_tree [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.153089] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52107776-8bde-96e6-e3c0-f99cfd41e819, 'name': SearchDatastore_Task, 'duration_secs': 0.013598} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.153598] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.153851] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1693ccdf-ea72-45d5-8b34-e2b0e155e528/1693ccdf-ea72-45d5-8b34-e2b0e155e528.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 727.154116] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a8f4a67-9d52-4d4d-856e-bc29cba953a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.162114] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 727.162114] env[69994]: value = "task-2925244" [ 727.162114] env[69994]: _type = "Task" [ 727.162114] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.171238] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925244, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.521345] env[69994]: DEBUG nova.compute.manager [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 727.557624] env[69994]: DEBUG nova.virt.hardware [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 727.558080] env[69994]: DEBUG nova.virt.hardware [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 727.558359] env[69994]: DEBUG nova.virt.hardware [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 727.559717] env[69994]: DEBUG nova.virt.hardware [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 727.560425] env[69994]: DEBUG nova.virt.hardware [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 727.560708] env[69994]: DEBUG nova.virt.hardware [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 727.561111] env[69994]: DEBUG nova.virt.hardware [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 727.561579] env[69994]: DEBUG nova.virt.hardware [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 727.561987] 
env[69994]: DEBUG nova.virt.hardware [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 727.562379] env[69994]: DEBUG nova.virt.hardware [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 727.562705] env[69994]: DEBUG nova.virt.hardware [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 727.564069] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f28c73-7392-42a2-8abd-8507d1cc5891 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.577823] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1eefccf-f90f-4100-adc9-82efc413c4c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.615932] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Acquiring lock "4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.616208] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Lock "4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.629459] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925243, 'name': ReconfigVM_Task, 'duration_secs': 0.317663} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.629459] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Reconfigured VM instance instance-0000001a to attach disk [datastore1] f36c29d1-b945-4afe-abbd-431e59de7cec/f36c29d1-b945-4afe-abbd-431e59de7cec.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 727.630038] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f26f47c-4131-4d24-92f5-531b180bf995 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.636229] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 727.636229] env[69994]: value = "task-2925245" [ 727.636229] env[69994]: _type = "Task" [ 727.636229] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.644181] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925245, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.654589] env[69994]: DEBUG nova.scheduler.client.report [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 727.672491] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925244, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450844} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.672491] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1693ccdf-ea72-45d5-8b34-e2b0e155e528/1693ccdf-ea72-45d5-8b34-e2b0e155e528.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 727.672727] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 727.672925] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-98c36e17-2c80-4603-a990-26fbf32b9db9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.680858] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 727.680858] env[69994]: value = "task-2925246" [ 727.680858] env[69994]: _type = "Task" [ 727.680858] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.691336] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925246, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.020035] env[69994]: DEBUG nova.compute.manager [req-bafab221-cd54-49e9-83f8-6147863f0d91 req-f50bd356-9dbf-4004-a844-2089ee579974 service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Received event network-vif-plugged-c8e0f8d2-cf57-4669-8fe4-3fec8b7232df {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 728.020300] env[69994]: DEBUG oslo_concurrency.lockutils [req-bafab221-cd54-49e9-83f8-6147863f0d91 req-f50bd356-9dbf-4004-a844-2089ee579974 service nova] Acquiring lock "b00d09ea-5eee-47ed-adcb-288cdd362e89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.020492] env[69994]: DEBUG oslo_concurrency.lockutils [req-bafab221-cd54-49e9-83f8-6147863f0d91 req-f50bd356-9dbf-4004-a844-2089ee579974 service nova] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.020663] env[69994]: DEBUG oslo_concurrency.lockutils [req-bafab221-cd54-49e9-83f8-6147863f0d91 req-f50bd356-9dbf-4004-a844-2089ee579974 service nova] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.020829] env[69994]: DEBUG nova.compute.manager [req-bafab221-cd54-49e9-83f8-6147863f0d91 req-f50bd356-9dbf-4004-a844-2089ee579974 service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] No waiting events found dispatching network-vif-plugged-c8e0f8d2-cf57-4669-8fe4-3fec8b7232df {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 728.020991] env[69994]: WARNING nova.compute.manager [req-bafab221-cd54-49e9-83f8-6147863f0d91 req-f50bd356-9dbf-4004-a844-2089ee579974 service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Received unexpected event network-vif-plugged-c8e0f8d2-cf57-4669-8fe4-3fec8b7232df for instance with vm_state building and task_state spawning. [ 728.156283] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925245, 'name': Rename_Task, 'duration_secs': 0.147617} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.157372] env[69994]: DEBUG nova.network.neutron [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Successfully updated port: c8e0f8d2-cf57-4669-8fe4-3fec8b7232df {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 728.158647] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 728.159567] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.677s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.160053] env[69994]: DEBUG nova.compute.manager [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 728.162725] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ee91ee0-8a1f-4abc-8ba5-1424c59c55a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.166610] env[69994]: DEBUG oslo_concurrency.lockutils [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.867s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.167737] env[69994]: DEBUG nova.objects.instance [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lazy-loading 'resources' on Instance uuid f3ae584d-18a5-4bbe-b4bf-860e2332b324 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 728.175922] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 728.175922] env[69994]: value = "task-2925247" [ 728.175922] env[69994]: _type = "Task" [ 728.175922] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.188085] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925247, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.194295] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925246, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184954} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.194295] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 728.195177] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb28bae0-2238-4111-9afb-c861fc3a4667 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.217871] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 1693ccdf-ea72-45d5-8b34-e2b0e155e528/1693ccdf-ea72-45d5-8b34-e2b0e155e528.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 728.218503] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10d38af6-5940-4bcc-b221-3d161aad9c77 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.239983] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 728.239983] env[69994]: value = "task-2925248" [ 728.239983] env[69994]: _type = "Task" [ 728.239983] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.249226] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925248, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.663977] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "refresh_cache-b00d09ea-5eee-47ed-adcb-288cdd362e89" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.663977] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquired lock "refresh_cache-b00d09ea-5eee-47ed-adcb-288cdd362e89" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.665213] env[69994]: DEBUG nova.network.neutron [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 728.671234] env[69994]: DEBUG nova.compute.utils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 728.679259] env[69994]: DEBUG nova.compute.manager [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 728.679456] env[69994]: DEBUG nova.network.neutron [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 728.696105] env[69994]: DEBUG oslo_vmware.api [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925247, 'name': PowerOnVM_Task, 'duration_secs': 0.479398} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.696105] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 728.696105] env[69994]: DEBUG nova.compute.manager [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 728.696105] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f0684b-a476-4fb0-9220-ceafc365a88f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.752146] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925248, 'name': ReconfigVM_Task, 'duration_secs': 0.351852} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.755021] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 1693ccdf-ea72-45d5-8b34-e2b0e155e528/1693ccdf-ea72-45d5-8b34-e2b0e155e528.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 728.755843] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6149f280-4319-43c6-9cb3-2b6d3c634e39 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.763909] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 728.763909] env[69994]: value = "task-2925249" [ 728.763909] env[69994]: _type = "Task" [ 728.763909] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.765231] env[69994]: DEBUG nova.policy [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2926ede2b398473b9a6d51e53912f26e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '569fef1e170f4ca4b91dda2282e58d79', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 728.778187] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925249, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.074547] env[69994]: DEBUG nova.network.neutron [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Successfully created port: 03117278-4ae8-434d-8093-80636a495ec6 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 729.174370] env[69994]: DEBUG nova.compute.manager [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 729.206963] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbaca73-c428-464e-b87f-3d4fa0a4e4af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.223380] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379b3131-fed1-4f55-a46d-ee5b34d9bb2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.225666] env[69994]: DEBUG nova.network.neutron [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.232537] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.264772] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b170c9-4094-4da6-b932-deef06bb3456 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.281839] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd73712-f41e-41b7-8340-f95d896d3455 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.285614] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925249, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.298020] env[69994]: DEBUG nova.compute.provider_tree [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.431811] env[69994]: DEBUG nova.network.neutron [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Updating instance_info_cache with network_info: [{"id": "c8e0f8d2-cf57-4669-8fe4-3fec8b7232df", "address": "fa:16:3e:3e:df:14", "network": {"id": "8f28d0df-43ec-4e28-b4e5-7007b665b70f", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1881204323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3185ee244cc74a8896b062af9d4e1478", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8e0f8d2-cf", "ovs_interfaceid": "c8e0f8d2-cf57-4669-8fe4-3fec8b7232df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.652648] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] 
Acquiring lock "f36c29d1-b945-4afe-abbd-431e59de7cec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.653110] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Lock "f36c29d1-b945-4afe-abbd-431e59de7cec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.653266] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquiring lock "f36c29d1-b945-4afe-abbd-431e59de7cec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.653457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Lock "f36c29d1-b945-4afe-abbd-431e59de7cec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.653642] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Lock "f36c29d1-b945-4afe-abbd-431e59de7cec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.656145] env[69994]: INFO nova.compute.manager [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Terminating instance [ 729.779775] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925249, 'name': Rename_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.801272] env[69994]: DEBUG nova.scheduler.client.report [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 729.934750] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Releasing lock "refresh_cache-b00d09ea-5eee-47ed-adcb-288cdd362e89" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.935285] env[69994]: DEBUG nova.compute.manager [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Instance network_info: |[{"id": "c8e0f8d2-cf57-4669-8fe4-3fec8b7232df", "address": "fa:16:3e:3e:df:14", "network": {"id": "8f28d0df-43ec-4e28-b4e5-7007b665b70f", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1881204323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3185ee244cc74a8896b062af9d4e1478", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8e0f8d2-cf", "ovs_interfaceid": "c8e0f8d2-cf57-4669-8fe4-3fec8b7232df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 729.935878] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:df:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8e0f8d2-cf57-4669-8fe4-3fec8b7232df', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 729.950298] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fda64be-6d12-460c-8049-aa67050a8006 
tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Creating folder: Project (3185ee244cc74a8896b062af9d4e1478). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 729.950770] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-133d6898-91cc-44aa-88f8-545600b2ded5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.963013] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Created folder: Project (3185ee244cc74a8896b062af9d4e1478) in parent group-v587342. [ 729.963215] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Creating folder: Instances. Parent ref: group-v587436. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 729.963453] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9998bb8d-3a6c-4618-9ab7-2c05dbaf6d9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.974511] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Created folder: Instances in parent group-v587436. [ 729.974747] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 729.974938] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 729.975557] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa62dae0-8cf7-4729-b22e-5b3dd5fd3071 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.995715] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 729.995715] env[69994]: value = "task-2925252" [ 729.995715] env[69994]: _type = "Task" [ 729.995715] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.002712] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925252, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.049643] env[69994]: DEBUG nova.compute.manager [req-acbfc6ec-a99a-4240-9051-0081b24c8f08 req-0f895f77-8911-4283-ae39-b989abd834e9 service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Received event network-changed-c8e0f8d2-cf57-4669-8fe4-3fec8b7232df {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 730.050009] env[69994]: DEBUG nova.compute.manager [req-acbfc6ec-a99a-4240-9051-0081b24c8f08 req-0f895f77-8911-4283-ae39-b989abd834e9 service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Refreshing instance network info cache due to event network-changed-c8e0f8d2-cf57-4669-8fe4-3fec8b7232df. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 730.050096] env[69994]: DEBUG oslo_concurrency.lockutils [req-acbfc6ec-a99a-4240-9051-0081b24c8f08 req-0f895f77-8911-4283-ae39-b989abd834e9 service nova] Acquiring lock "refresh_cache-b00d09ea-5eee-47ed-adcb-288cdd362e89" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.050212] env[69994]: DEBUG oslo_concurrency.lockutils [req-acbfc6ec-a99a-4240-9051-0081b24c8f08 req-0f895f77-8911-4283-ae39-b989abd834e9 service nova] Acquired lock "refresh_cache-b00d09ea-5eee-47ed-adcb-288cdd362e89" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.050366] env[69994]: DEBUG nova.network.neutron [req-acbfc6ec-a99a-4240-9051-0081b24c8f08 req-0f895f77-8911-4283-ae39-b989abd834e9 service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Refreshing network info cache for port c8e0f8d2-cf57-4669-8fe4-3fec8b7232df {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 730.159799] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquiring lock "refresh_cache-f36c29d1-b945-4afe-abbd-431e59de7cec" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.160171] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquired lock "refresh_cache-f36c29d1-b945-4afe-abbd-431e59de7cec" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.160239] env[69994]: DEBUG nova.network.neutron [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 730.189718] env[69994]: DEBUG nova.compute.manager [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 730.215276] env[69994]: DEBUG nova.virt.hardware [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 730.215612] env[69994]: DEBUG nova.virt.hardware [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 730.215772] env[69994]: DEBUG nova.virt.hardware [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 730.215954] env[69994]: DEBUG nova.virt.hardware [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 730.216115] env[69994]: DEBUG nova.virt.hardware [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 730.216265] env[69994]: DEBUG nova.virt.hardware [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 730.216472] env[69994]: DEBUG nova.virt.hardware [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 730.216647] env[69994]: DEBUG nova.virt.hardware [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
730.216848] env[69994]: DEBUG nova.virt.hardware [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 730.217035] env[69994]: DEBUG nova.virt.hardware [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 730.217234] env[69994]: DEBUG nova.virt.hardware [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 730.218090] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d763ee-1ae8-4637-896a-24d9dc1d3dec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.226883] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd09eff-7b99-474c-b495-0b59dd486379 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.279724] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925249, 'name': Rename_Task, 'duration_secs': 1.419453} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.279940] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 730.280274] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-607bf978-e130-4104-acba-48360e22345e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.287922] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 730.287922] env[69994]: value = "task-2925253" [ 730.287922] env[69994]: _type = "Task" [ 730.287922] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.296991] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925253, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.310551] env[69994]: DEBUG oslo_concurrency.lockutils [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.144s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.313906] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.019s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.314196] env[69994]: DEBUG nova.objects.instance [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Lazy-loading 'resources' on Instance uuid 55dd32b0-e67f-4943-86e8-b9956267fedc {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 730.338755] env[69994]: INFO nova.scheduler.client.report [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleted allocations for instance f3ae584d-18a5-4bbe-b4bf-860e2332b324 [ 730.507626] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925252, 'name': CreateVM_Task, 'duration_secs': 0.407505} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.507805] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 730.512641] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.512809] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.514355] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 730.514651] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7b5177c-eb01-4041-adf2-9c37d9a66bad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.520377] env[69994]: DEBUG 
oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 730.520377] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520426ca-20e7-d81e-ad5d-bfc41cc66894" [ 730.520377] env[69994]: _type = "Task" [ 730.520377] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.529303] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520426ca-20e7-d81e-ad5d-bfc41cc66894, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.686908] env[69994]: DEBUG nova.network.neutron [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.727159] env[69994]: DEBUG nova.network.neutron [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Successfully updated port: 03117278-4ae8-434d-8093-80636a495ec6 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 730.757838] env[69994]: DEBUG nova.network.neutron [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.798973] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925253, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.817159] env[69994]: DEBUG nova.network.neutron [req-acbfc6ec-a99a-4240-9051-0081b24c8f08 req-0f895f77-8911-4283-ae39-b989abd834e9 service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Updated VIF entry in instance network info cache for port c8e0f8d2-cf57-4669-8fe4-3fec8b7232df. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 730.817636] env[69994]: DEBUG nova.network.neutron [req-acbfc6ec-a99a-4240-9051-0081b24c8f08 req-0f895f77-8911-4283-ae39-b989abd834e9 service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Updating instance_info_cache with network_info: [{"id": "c8e0f8d2-cf57-4669-8fe4-3fec8b7232df", "address": "fa:16:3e:3e:df:14", "network": {"id": "8f28d0df-43ec-4e28-b4e5-7007b665b70f", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1881204323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3185ee244cc74a8896b062af9d4e1478", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8e0f8d2-cf", "ovs_interfaceid": "c8e0f8d2-cf57-4669-8fe4-3fec8b7232df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.851630] env[69994]: DEBUG oslo_concurrency.lockutils [None req-313bc97f-06c0-4062-9e0e-8b64164abb83 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "f3ae584d-18a5-4bbe-b4bf-860e2332b324" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.201s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.032305] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520426ca-20e7-d81e-ad5d-bfc41cc66894, 'name': SearchDatastore_Task, 'duration_secs': 0.010723} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.032635] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.032865] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 731.033112] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.033250] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.033428] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 731.033688] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d560c03-333c-49df-9728-346cb7859f9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.043491] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 731.043673] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 731.044469] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b15eacb-4bef-487f-92ac-b1029e93116b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.052686] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 731.052686] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526d1ffd-cd2c-1b0a-1813-ad9ef0295784" [ 731.052686] env[69994]: _type = "Task" [ 731.052686] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.061631] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526d1ffd-cd2c-1b0a-1813-ad9ef0295784, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.231127] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquiring lock "refresh_cache-2ee43622-74f3-4bf6-88e3-cba4ff7ce33d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.231445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquired lock "refresh_cache-2ee43622-74f3-4bf6-88e3-cba4ff7ce33d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.231550] env[69994]: DEBUG nova.network.neutron [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 731.260125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Releasing lock "refresh_cache-f36c29d1-b945-4afe-abbd-431e59de7cec" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.260685] env[69994]: DEBUG nova.compute.manager [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 731.260990] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 731.262879] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a454de-3ec5-4615-b633-bb6d235bc904 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.282366] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 731.282758] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80b3f162-971c-4085-a54d-242101d35b8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.293727] env[69994]: DEBUG oslo_vmware.api [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 731.293727] env[69994]: value = "task-2925254" [ 731.293727] env[69994]: _type = "Task" [ 731.293727] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.300418] env[69994]: DEBUG oslo_vmware.api [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925253, 'name': PowerOnVM_Task, 'duration_secs': 0.943905} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.301995] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 731.302239] env[69994]: INFO nova.compute.manager [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Took 6.44 seconds to spawn the instance on the hypervisor. 
[ 731.302494] env[69994]: DEBUG nova.compute.manager [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 731.303265] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3816a30e-c234-4ba5-8fbd-d67ba651da9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.309287] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd862e38-4885-4581-b5a4-11b9b91365a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.312254] env[69994]: DEBUG oslo_vmware.api [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925254, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.319116] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d25df4-6292-4421-b818-691df9af3a3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.325044] env[69994]: DEBUG oslo_concurrency.lockutils [req-acbfc6ec-a99a-4240-9051-0081b24c8f08 req-0f895f77-8911-4283-ae39-b989abd834e9 service nova] Releasing lock "refresh_cache-b00d09ea-5eee-47ed-adcb-288cdd362e89" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.358423] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3a331e-1741-4a1e-803a-927f694078bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.368131] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c81867-9cf1-4657-a076-b421f4258e03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.389760] env[69994]: DEBUG nova.compute.provider_tree [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.563446] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526d1ffd-cd2c-1b0a-1813-ad9ef0295784, 'name': SearchDatastore_Task, 'duration_secs': 0.010438} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.564292] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1eb8610-f9e5-4a0d-b5b8-eba26d736d18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.570516] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 731.570516] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dab2c6-22a0-5d50-038f-13dee888fbdd" [ 731.570516] env[69994]: _type = "Task" [ 731.570516] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.578882] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dab2c6-22a0-5d50-038f-13dee888fbdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.762580] env[69994]: DEBUG nova.network.neutron [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.803940] env[69994]: DEBUG oslo_vmware.api [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925254, 'name': PowerOffVM_Task, 'duration_secs': 0.191107} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.803940] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 731.803940] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 731.804170] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da393928-8bea-4997-82e4-cc353c2c1617 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.832891] env[69994]: INFO nova.compute.manager [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Took 40.74 seconds to build instance. 
[ 731.835093] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 731.835315] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 731.835519] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Deleting the datastore file [datastore1] f36c29d1-b945-4afe-abbd-431e59de7cec {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.838465] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df4f2795-76ee-4e17-9c9b-bc6c63925d00 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.846888] env[69994]: DEBUG oslo_vmware.api [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for the task: (returnval){ [ 731.846888] env[69994]: value = "task-2925256" [ 731.846888] env[69994]: _type = "Task" [ 731.846888] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.859194] env[69994]: DEBUG oslo_vmware.api [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925256, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.894200] env[69994]: DEBUG nova.scheduler.client.report [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 731.940441] env[69994]: DEBUG nova.network.neutron [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Updating instance_info_cache with network_info: [{"id": "03117278-4ae8-434d-8093-80636a495ec6", "address": "fa:16:3e:17:77:0a", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03117278-4a", "ovs_interfaceid": "03117278-4ae8-434d-8093-80636a495ec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.082604] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dab2c6-22a0-5d50-038f-13dee888fbdd, 'name': SearchDatastore_Task, 'duration_secs': 0.010597} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.082902] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.083211] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] b00d09ea-5eee-47ed-adcb-288cdd362e89/b00d09ea-5eee-47ed-adcb-288cdd362e89.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 732.083773] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-058f2891-0eaa-4382-a742-d57cb616b08c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.091718] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 732.091718] env[69994]: value = "task-2925257" [ 732.091718] env[69994]: _type = "Task" [ 732.091718] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.100192] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925257, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.223643] env[69994]: DEBUG nova.compute.manager [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Received event network-vif-plugged-03117278-4ae8-434d-8093-80636a495ec6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 732.223776] env[69994]: DEBUG oslo_concurrency.lockutils [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] Acquiring lock "2ee43622-74f3-4bf6-88e3-cba4ff7ce33d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.224043] env[69994]: DEBUG oslo_concurrency.lockutils [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] Lock "2ee43622-74f3-4bf6-88e3-cba4ff7ce33d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.224217] env[69994]: DEBUG oslo_concurrency.lockutils [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] Lock "2ee43622-74f3-4bf6-88e3-cba4ff7ce33d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.224379] env[69994]: DEBUG nova.compute.manager [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] No waiting events found dispatching network-vif-plugged-03117278-4ae8-434d-8093-80636a495ec6 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 732.224578] env[69994]: WARNING nova.compute.manager [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Received unexpected event network-vif-plugged-03117278-4ae8-434d-8093-80636a495ec6 for instance with vm_state building and task_state spawning. [ 732.225293] env[69994]: DEBUG nova.compute.manager [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Received event network-changed-03117278-4ae8-434d-8093-80636a495ec6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 732.225293] env[69994]: DEBUG nova.compute.manager [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Refreshing instance network info cache due to event network-changed-03117278-4ae8-434d-8093-80636a495ec6. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 732.225293] env[69994]: DEBUG oslo_concurrency.lockutils [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] Acquiring lock "refresh_cache-2ee43622-74f3-4bf6-88e3-cba4ff7ce33d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.337038] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf4e2a8f-27d5-4f4c-9f3d-f5cba5c18090 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Lock "1693ccdf-ea72-45d5-8b34-e2b0e155e528" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.783s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.365575] env[69994]: DEBUG oslo_vmware.api [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Task: {'id': task-2925256, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1025} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.365957] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 732.366205] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 732.366421] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 732.366693] env[69994]: INFO nova.compute.manager [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Took 1.11 seconds to destroy the instance on the hypervisor. [ 732.366985] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 732.367252] env[69994]: DEBUG nova.compute.manager [-] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 732.367447] env[69994]: DEBUG nova.network.neutron [-] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 732.384815] env[69994]: DEBUG nova.network.neutron [-] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.398414] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.086s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.402038] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.588s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.402990] env[69994]: INFO nova.compute.claims [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.422926] env[69994]: INFO nova.scheduler.client.report [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Deleted allocations for instance 55dd32b0-e67f-4943-86e8-b9956267fedc [ 732.443259] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Releasing lock "refresh_cache-2ee43622-74f3-4bf6-88e3-cba4ff7ce33d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.443928] env[69994]: DEBUG nova.compute.manager [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Instance network_info: |[{"id": "03117278-4ae8-434d-8093-80636a495ec6", "address": "fa:16:3e:17:77:0a", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03117278-4a", "ovs_interfaceid": "03117278-4ae8-434d-8093-80636a495ec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 732.444710] env[69994]: DEBUG oslo_concurrency.lockutils [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] Acquired lock "refresh_cache-2ee43622-74f3-4bf6-88e3-cba4ff7ce33d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.444710] env[69994]: DEBUG nova.network.neutron [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Refreshing network info cache for port 03117278-4ae8-434d-8093-80636a495ec6 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 732.446056] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:77:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '03117278-4ae8-434d-8093-80636a495ec6', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 732.455175] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 732.455755] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 732.456027] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab3c159a-7b17-4934-9d13-8b5ce22f1c23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.481945] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 732.481945] env[69994]: value = "task-2925258" [ 732.481945] env[69994]: _type = "Task" [ 732.481945] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.494700] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925258, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.604024] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925257, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.451417} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.604024] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] b00d09ea-5eee-47ed-adcb-288cdd362e89/b00d09ea-5eee-47ed-adcb-288cdd362e89.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 732.604422] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 732.604422] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6ed62b4-572b-48e5-aade-84a49aa73513 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.612122] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 732.612122] env[69994]: value = "task-2925259" [ 732.612122] env[69994]: _type = "Task" [ 732.612122] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.623880] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925259, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.690149] env[69994]: INFO nova.compute.manager [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Rebuilding instance [ 732.736016] env[69994]: DEBUG nova.compute.manager [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 732.736921] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfef868d-869d-4186-ba53-3df05e920153 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.839772] env[69994]: DEBUG nova.compute.manager [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 732.887433] env[69994]: DEBUG nova.network.neutron [-] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.931885] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd4db79e-352c-4054-9cde-c7443958a0b8 tempest-ServerGroupTestJSON-1725733079 tempest-ServerGroupTestJSON-1725733079-project-member] Lock "55dd32b0-e67f-4943-86e8-b9956267fedc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.186s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.994044] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925258, 'name': CreateVM_Task, 'duration_secs': 0.373186} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.994044] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 732.994709] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.994873] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.995199] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 732.995465] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4170155d-1755-49aa-9fbe-56271c74b6b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.002223] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 733.002223] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52349215-661b-962a-ba1a-f932273ad1dc" [ 733.002223] env[69994]: _type = "Task" [ 733.002223] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.010563] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52349215-661b-962a-ba1a-f932273ad1dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.122708] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925259, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076162} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.122971] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 733.123777] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc3271a-bc2d-4d97-a4be-00df38dbd3b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.147543] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] b00d09ea-5eee-47ed-adcb-288cdd362e89/b00d09ea-5eee-47ed-adcb-288cdd362e89.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 733.147873] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ef819c7-e046-4b0a-b767-2355a51a1c35 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.179083] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 733.179083] env[69994]: value = "task-2925260" [ 733.179083] env[69994]: _type = "Task" [ 733.179083] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.191726] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925260, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.202100] env[69994]: DEBUG nova.network.neutron [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Updated VIF entry in instance network info cache for port 03117278-4ae8-434d-8093-80636a495ec6. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 733.202644] env[69994]: DEBUG nova.network.neutron [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Updating instance_info_cache with network_info: [{"id": "03117278-4ae8-434d-8093-80636a495ec6", "address": "fa:16:3e:17:77:0a", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03117278-4a", "ovs_interfaceid": "03117278-4ae8-434d-8093-80636a495ec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.367185] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.390109] env[69994]: INFO nova.compute.manager [-] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Took 1.02 seconds to deallocate network for instance. [ 733.516450] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52349215-661b-962a-ba1a-f932273ad1dc, 'name': SearchDatastore_Task, 'duration_secs': 0.011032} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.516580] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.517487] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 733.517487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.517487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.517487] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 733.517740] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9218666-82d4-4422-a244-de5d0eea2c50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.527399] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 733.527567] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 733.528405] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba2fd368-12fe-4e4a-ab54-e5dca619c5c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.538526] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 733.538526] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cc9687-5a65-4b17-c678-0ff1d0f56bc6" [ 733.538526] env[69994]: _type = "Task" [ 733.538526] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.547385] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cc9687-5a65-4b17-c678-0ff1d0f56bc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.695756] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925260, 'name': ReconfigVM_Task, 'duration_secs': 0.414424} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.695756] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Reconfigured VM instance instance-0000001d to attach disk [datastore1] b00d09ea-5eee-47ed-adcb-288cdd362e89/b00d09ea-5eee-47ed-adcb-288cdd362e89.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 733.695756] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41229570-8367-40fe-98c7-905b7eb00f89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.705501] env[69994]: DEBUG oslo_concurrency.lockutils [req-ec7f9f48-b47f-4349-8f5b-ef10074b7323 req-2ab63a41-f6b0-4009-a0aa-08557889efe6 service nova] Releasing lock "refresh_cache-2ee43622-74f3-4bf6-88e3-cba4ff7ce33d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.705977] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 733.705977] env[69994]: value = "task-2925261" [ 733.705977] env[69994]: _type = "Task" [ 733.705977] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.723581] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925261, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.754453] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 733.755151] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e99799d-d36f-4211-a591-6e33e4da4284 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.768940] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 733.768940] env[69994]: value = "task-2925262" [ 733.768940] env[69994]: _type = "Task" [ 733.768940] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.780636] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925262, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.898737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.964291] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cdc5833-80c6-42cb-99eb-bd1ae3560570 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.973276] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ca1527-c625-4f06-9b20-40436d789d3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.007520] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d66e01a-cf35-4274-a653-13e978e15491 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.019209] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9844db-9a7d-4a09-97d3-b572d9f63227 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.033973] env[69994]: DEBUG nova.compute.provider_tree [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.049730] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cc9687-5a65-4b17-c678-0ff1d0f56bc6, 'name': SearchDatastore_Task, 'duration_secs': 0.010842} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.050660] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11199f43-1a6b-4555-94d6-626b769fdbcc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.056474] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 734.056474] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b38f9d-0cd5-0f71-8785-cb849e820f61" [ 734.056474] env[69994]: _type = "Task" [ 734.056474] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.065630] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b38f9d-0cd5-0f71-8785-cb849e820f61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.222605] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925261, 'name': Rename_Task, 'duration_secs': 0.347632} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.222797] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 734.223530] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45af3399-20e7-4150-b1c4-1fa0fbc5ed68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.231770] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 734.231770] env[69994]: value = "task-2925263" [ 734.231770] env[69994]: _type = "Task" [ 734.231770] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.245022] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925263, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.280723] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925262, 'name': PowerOffVM_Task, 'duration_secs': 0.16546} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.281124] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 734.281758] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 734.282182] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1d05c5-4ca5-443e-ad82-28722a8b32c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.292704] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 734.293014] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c16a1b7-448e-4651-b46e-e503f5d8ec41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.325577] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 734.325857] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 734.326059] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Deleting the datastore file [datastore1] 1693ccdf-ea72-45d5-8b34-e2b0e155e528 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 734.326333] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e1d639b-ed2d-45db-8066-55484f49582d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.333973] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 734.333973] env[69994]: value = "task-2925265" [ 734.333973] env[69994]: _type = "Task" [ 734.333973] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.343216] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925265, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.539303] env[69994]: DEBUG nova.scheduler.client.report [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 734.568276] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b38f9d-0cd5-0f71-8785-cb849e820f61, 'name': SearchDatastore_Task, 'duration_secs': 0.011081} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.568569] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.568799] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d/2ee43622-74f3-4bf6-88e3-cba4ff7ce33d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 734.569104] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a846431-9a77-4bf4-aa60-eacf3f433c76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.577124] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 734.577124] env[69994]: value = "task-2925266" [ 734.577124] env[69994]: _type = "Task" [ 734.577124] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.586516] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925266, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.749784] env[69994]: DEBUG oslo_vmware.api [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925263, 'name': PowerOnVM_Task, 'duration_secs': 0.504584} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.752308] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 734.752308] env[69994]: INFO nova.compute.manager [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Took 7.23 seconds to spawn the instance on the hypervisor. [ 734.752308] env[69994]: DEBUG nova.compute.manager [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 734.752914] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c025150f-2e21-4025-a15d-6a69ca07a998 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.849634] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925265, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1056} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.849964] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 734.850332] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 734.850430] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 735.045708] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.046337] env[69994]: DEBUG nova.compute.manager [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 735.049851] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.533s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.050218] env[69994]: DEBUG nova.objects.instance [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Lazy-loading 'resources' on Instance uuid 70e5674d-4627-4720-9b87-955c2749e010 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 735.088875] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925266, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508056} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.089213] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d/2ee43622-74f3-4bf6-88e3-cba4ff7ce33d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 735.089429] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 735.089701] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b3a1e207-05fd-422e-a163-015ac6aa21d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.097639] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 735.097639] env[69994]: value = "task-2925267" [ 735.097639] env[69994]: _type = "Task" [ 735.097639] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.107473] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925267, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.275701] env[69994]: INFO nova.compute.manager [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Took 42.50 seconds to build instance. [ 735.554344] env[69994]: DEBUG nova.compute.utils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 735.559170] env[69994]: DEBUG nova.compute.manager [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 735.559533] env[69994]: DEBUG nova.network.neutron [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 735.606537] env[69994]: DEBUG nova.policy [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9c7ff94bd744305a13df72dbf967c11', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66d57a69e0924b9abc2cc4e67fc8173c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 735.614576] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925267, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076257} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.614991] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 735.615756] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8b2a24-1a08-4469-98de-73032832a664 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.642827] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d/2ee43622-74f3-4bf6-88e3-cba4ff7ce33d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 735.648218] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20f01982-92a4-4c10-b407-149260b8d9eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.671034] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 735.671034] env[69994]: value = "task-2925268" [ 735.671034] env[69994]: _type = "Task" [ 735.671034] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.680731] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925268, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.779364] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fda64be-6d12-460c-8049-aa67050a8006 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.166s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.909618] env[69994]: DEBUG nova.virt.hardware [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 735.909863] env[69994]: DEBUG nova.virt.hardware [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 735.910030] env[69994]: DEBUG nova.virt.hardware [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 735.910220] env[69994]: DEBUG nova.virt.hardware [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 735.910362] env[69994]: DEBUG nova.virt.hardware [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 735.910513] env[69994]: DEBUG nova.virt.hardware [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 735.910795] env[69994]: DEBUG nova.virt.hardware [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 735.911031] env[69994]: DEBUG nova.virt.hardware [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 735.911240] env[69994]: DEBUG nova.virt.hardware [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 735.911413] env[69994]: DEBUG nova.virt.hardware [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 735.913431] env[69994]: DEBUG nova.virt.hardware [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 735.913431] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d69b31-3aaf-4327-b549-5c621cb23244 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.923683] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b534150-366b-4f75-8530-35abcf5dccd8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.945316] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 735.950987] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 735.953854] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 735.954304] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66657e42-cb76-4e45-84c3-722d644ece50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.979087] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 735.979087] env[69994]: value = "task-2925269" [ 735.979087] env[69994]: _type = "Task" [ 735.979087] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.988201] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925269, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.023173] env[69994]: DEBUG nova.network.neutron [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Successfully created port: 8b639504-b3a0-4772-9a06-af40fbe1667e {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.059799] env[69994]: DEBUG nova.compute.manager [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 736.099290] env[69994]: DEBUG nova.compute.manager [req-3f720d5a-50c2-4929-a731-7188a41d4b84 req-d92a7a5b-93db-4559-afc0-3a7135fc5307 service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Received event network-changed-c8e0f8d2-cf57-4669-8fe4-3fec8b7232df {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 736.099478] env[69994]: DEBUG nova.compute.manager [req-3f720d5a-50c2-4929-a731-7188a41d4b84 req-d92a7a5b-93db-4559-afc0-3a7135fc5307 service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Refreshing instance network info cache due to event network-changed-c8e0f8d2-cf57-4669-8fe4-3fec8b7232df. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 736.099726] env[69994]: DEBUG oslo_concurrency.lockutils [req-3f720d5a-50c2-4929-a731-7188a41d4b84 req-d92a7a5b-93db-4559-afc0-3a7135fc5307 service nova] Acquiring lock "refresh_cache-b00d09ea-5eee-47ed-adcb-288cdd362e89" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.099876] env[69994]: DEBUG oslo_concurrency.lockutils [req-3f720d5a-50c2-4929-a731-7188a41d4b84 req-d92a7a5b-93db-4559-afc0-3a7135fc5307 service nova] Acquired lock "refresh_cache-b00d09ea-5eee-47ed-adcb-288cdd362e89" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.100047] env[69994]: DEBUG nova.network.neutron [req-3f720d5a-50c2-4929-a731-7188a41d4b84 req-d92a7a5b-93db-4559-afc0-3a7135fc5307 service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Refreshing network info cache for port c8e0f8d2-cf57-4669-8fe4-3fec8b7232df {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 736.114918] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352afd2d-e66a-4e77-b232-c27f9bc7f251 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.124554] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f26f516-b27d-4cd6-a7e3-b2a86640bf5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.159963] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d8fed0-5592-4c72-b86c-e1141cf3990d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.171076] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c6298c-5c27-40ee-8638-9fa609daa223 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.190572] env[69994]: DEBUG nova.compute.provider_tree [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.196444] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925268, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.281552] env[69994]: DEBUG nova.compute.manager [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 736.490463] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925269, 'name': CreateVM_Task, 'duration_secs': 0.314033} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.490647] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 736.491079] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.491245] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.491566] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 736.491825] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b93d3aa3-92ba-4fec-8f51-0ddd34ff6545 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.497273] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 736.497273] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52790cfd-b0c3-1b9c-d4d8-af101b327f34" [ 736.497273] env[69994]: _type = "Task" [ 736.497273] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.506329] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52790cfd-b0c3-1b9c-d4d8-af101b327f34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.688183] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925268, 'name': ReconfigVM_Task, 'duration_secs': 0.941596} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.688183] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d/2ee43622-74f3-4bf6-88e3-cba4ff7ce33d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 736.689425] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c19c86a-f27c-4366-8572-6b5dd6fc540b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.696576] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 736.696576] env[69994]: value = "task-2925270" [ 736.696576] env[69994]: _type = "Task" [ 736.696576] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.697372] env[69994]: DEBUG nova.scheduler.client.report [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 736.711020] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925270, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.812188] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.867879] env[69994]: DEBUG nova.network.neutron [req-3f720d5a-50c2-4929-a731-7188a41d4b84 req-d92a7a5b-93db-4559-afc0-3a7135fc5307 service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Updated VIF entry in instance network info cache for port c8e0f8d2-cf57-4669-8fe4-3fec8b7232df. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 736.868280] env[69994]: DEBUG nova.network.neutron [req-3f720d5a-50c2-4929-a731-7188a41d4b84 req-d92a7a5b-93db-4559-afc0-3a7135fc5307 service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Updating instance_info_cache with network_info: [{"id": "c8e0f8d2-cf57-4669-8fe4-3fec8b7232df", "address": "fa:16:3e:3e:df:14", "network": {"id": "8f28d0df-43ec-4e28-b4e5-7007b665b70f", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1881204323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3185ee244cc74a8896b062af9d4e1478", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8e0f8d2-cf", "ovs_interfaceid": "c8e0f8d2-cf57-4669-8fe4-3fec8b7232df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.008977] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52790cfd-b0c3-1b9c-d4d8-af101b327f34, 'name': SearchDatastore_Task, 'duration_secs': 0.009861} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.008977] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.008977] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.009314] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.009368] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.009522] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.010336] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d5b1098-11a3-417c-b6ba-d5b3aa3a718a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.019038] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 737.019246] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 737.019978] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f32fcfe6-04fb-4b83-8e80-53b27262534a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.025997] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 737.025997] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52049fdc-0be1-148b-f6e9-607d0a878fb8" [ 737.025997] env[69994]: _type = "Task" [ 737.025997] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.034235] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52049fdc-0be1-148b-f6e9-607d0a878fb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.077011] env[69994]: DEBUG nova.compute.manager [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 737.108469] env[69994]: DEBUG nova.virt.hardware [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 737.108469] env[69994]: DEBUG nova.virt.hardware [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.108469] env[69994]: DEBUG nova.virt.hardware [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 737.108469] env[69994]: DEBUG nova.virt.hardware [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 
tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.108469] env[69994]: DEBUG nova.virt.hardware [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 737.108469] env[69994]: DEBUG nova.virt.hardware [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 737.108469] env[69994]: DEBUG nova.virt.hardware [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 737.108911] env[69994]: DEBUG nova.virt.hardware [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 737.108911] env[69994]: DEBUG nova.virt.hardware [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 737.108977] env[69994]: DEBUG nova.virt.hardware [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 737.109134] env[69994]: DEBUG nova.virt.hardware [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 737.110018] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4262b0a-6720-4955-8e45-6a5fa5e656da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.118997] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99190dfd-cd56-4e1e-8ee4-07a90bf11662 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.207891] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.158s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.209876] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925270, 'name': Rename_Task, 'duration_secs': 0.377841} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.210488] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.526s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.210580] env[69994]: DEBUG nova.objects.instance [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Lazy-loading 'resources' on Instance uuid e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 737.211828] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 737.212298] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6178245-b64c-4ea5-892f-758dfedde578 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.220367] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 737.220367] env[69994]: value = "task-2925271" [ 737.220367] env[69994]: _type = "Task" [ 737.220367] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.229485] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925271, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.230432] env[69994]: INFO nova.scheduler.client.report [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Deleted allocations for instance 70e5674d-4627-4720-9b87-955c2749e010 [ 737.371567] env[69994]: DEBUG oslo_concurrency.lockutils [req-3f720d5a-50c2-4929-a731-7188a41d4b84 req-d92a7a5b-93db-4559-afc0-3a7135fc5307 service nova] Releasing lock "refresh_cache-b00d09ea-5eee-47ed-adcb-288cdd362e89" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.504659] env[69994]: DEBUG nova.compute.manager [req-819e1bd9-2d30-4ac3-8982-5e806fac4602 req-dee52de8-dbb8-46bc-a72b-5df3df8ac2b9 service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Received event network-vif-plugged-8b639504-b3a0-4772-9a06-af40fbe1667e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 737.504971] env[69994]: DEBUG oslo_concurrency.lockutils [req-819e1bd9-2d30-4ac3-8982-5e806fac4602 req-dee52de8-dbb8-46bc-a72b-5df3df8ac2b9 service nova] Acquiring lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.505380] env[69994]: DEBUG oslo_concurrency.lockutils [req-819e1bd9-2d30-4ac3-8982-5e806fac4602 req-dee52de8-dbb8-46bc-a72b-5df3df8ac2b9 service nova] Lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.505573] env[69994]: DEBUG oslo_concurrency.lockutils [req-819e1bd9-2d30-4ac3-8982-5e806fac4602 req-dee52de8-dbb8-46bc-a72b-5df3df8ac2b9 service nova] Lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.505801] env[69994]: DEBUG nova.compute.manager [req-819e1bd9-2d30-4ac3-8982-5e806fac4602 req-dee52de8-dbb8-46bc-a72b-5df3df8ac2b9 service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] No waiting events found dispatching network-vif-plugged-8b639504-b3a0-4772-9a06-af40fbe1667e {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 737.505995] env[69994]: WARNING nova.compute.manager [req-819e1bd9-2d30-4ac3-8982-5e806fac4602 req-dee52de8-dbb8-46bc-a72b-5df3df8ac2b9 service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Received unexpected event network-vif-plugged-8b639504-b3a0-4772-9a06-af40fbe1667e for instance with vm_state building and task_state spawning. [ 737.538209] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52049fdc-0be1-148b-f6e9-607d0a878fb8, 'name': SearchDatastore_Task, 'duration_secs': 0.009848} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.539497] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c30bff6-9bfe-4b42-b9e0-97f6df9b3dd5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.545937] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 737.545937] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525b0157-c5e7-05ec-6cf2-b9c90f65321f" [ 737.545937] env[69994]: _type = "Task" [ 737.545937] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.555214] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525b0157-c5e7-05ec-6cf2-b9c90f65321f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.640183] env[69994]: DEBUG nova.network.neutron [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Successfully updated port: 8b639504-b3a0-4772-9a06-af40fbe1667e {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 737.733125] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925271, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.740136] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df284fa4-214b-415b-8518-6a1b4db3c6de tempest-FloatingIPsAssociationNegativeTestJSON-98860262 tempest-FloatingIPsAssociationNegativeTestJSON-98860262-project-member] Lock "70e5674d-4627-4720-9b87-955c2749e010" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.783s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.059624] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525b0157-c5e7-05ec-6cf2-b9c90f65321f, 'name': SearchDatastore_Task, 'duration_secs': 0.012061} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.059624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.059924] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1693ccdf-ea72-45d5-8b34-e2b0e155e528/1693ccdf-ea72-45d5-8b34-e2b0e155e528.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 738.060213] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38862e61-75aa-46c1-af60-e68a177d02ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.069352] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 738.069352] env[69994]: value = "task-2925272" [ 738.069352] env[69994]: _type = "Task" [ 738.069352] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.081667] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925272, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.143614] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.143762] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.144673] env[69994]: DEBUG nova.network.neutron [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 738.182963] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad221d9-26f8-4b4e-af86-70fb2c2de91c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.192035] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8523b92f-8bc2-453c-8003-afe4554b500d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.230463] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea85602-a8b7-4100-acba-864212b5e54e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.239874] env[69994]: DEBUG oslo_vmware.api [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925271, 'name': PowerOnVM_Task, 'duration_secs': 0.963336} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.242762] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 738.243015] env[69994]: INFO nova.compute.manager [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Took 8.05 seconds to spawn the instance on the hypervisor. 
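Editor's note: the recurring "Waiting for the task: (returnval){ value = ... _type = Task } to complete" and "progress is N%" pairs above (SearchDatastore_Task, CopyVirtualDisk_Task, PowerOnVM_Task) are the driver submitting a vCenter task and then blocking in oslo.vmware's wait_for_task, which re-reads the task's state and progress until it reaches success or error. Below is a minimal, self-contained sketch of that poll-until-done loop; it is not the real oslo_vmware.api implementation, and fetch_task_info / TaskInfo are illustrative stand-ins for a PropertyCollector read of the managed Task object.

# Hedged sketch of the task-polling pattern visible in the log entries above.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str            # 'queued' | 'running' | 'success' | 'error'
    progress: int         # 0-100, as in the "progress is N%" log lines
    error: str | None = None

def wait_for_task(fetch_task_info, task_ref, poll_interval=0.5, timeout=300):
    """Poll a vCenter task until it finishes, mirroring the
    'Waiting for the task ... to complete' / 'progress is N%' entries."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_ref)          # stand-in for a vSphere property read
        print(f"Task: {task_ref} progress is {info.progress}%")
        if info.state == "success":
            return info                           # "... completed successfully."
        if info.state == "error":
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_ref} did not complete in {timeout}s")

The spawn sequence in this log is just several of these waits chained back to back: copy the cached image VMDK to the instance directory, extend the root disk, reconfigure the VM to attach the disk, rename, then power on.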
[ 738.243227] env[69994]: DEBUG nova.compute.manager [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 738.244345] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cba5d3-cd4c-4674-8a9a-318ef326a970 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.247930] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c1fe9d-b0ee-47bb-950e-819776db6f20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.271043] env[69994]: DEBUG nova.compute.provider_tree [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.582245] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925272, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506012} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.582523] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1693ccdf-ea72-45d5-8b34-e2b0e155e528/1693ccdf-ea72-45d5-8b34-e2b0e155e528.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 738.582741] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 738.582996] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7cb3d4c-370c-4630-b6a0-d889906d380c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.590082] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 738.590082] env[69994]: value = "task-2925273" [ 738.590082] env[69994]: _type = "Task" [ 738.590082] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.598744] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925273, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.692164] env[69994]: DEBUG nova.network.neutron [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.780281] env[69994]: DEBUG nova.scheduler.client.report [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 738.797656] env[69994]: INFO nova.compute.manager [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Took 42.23 seconds to build instance. [ 738.872880] env[69994]: DEBUG nova.network.neutron [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Updating instance_info_cache with network_info: [{"id": "8b639504-b3a0-4772-9a06-af40fbe1667e", "address": "fa:16:3e:d3:a3:94", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b639504-b3", "ovs_interfaceid": "8b639504-b3a0-4772-9a06-af40fbe1667e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.101457] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925273, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.156108} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.101743] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 739.102640] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb91f70-b94c-4075-81e3-19b51ab4bbdc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.124213] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 1693ccdf-ea72-45d5-8b34-e2b0e155e528/1693ccdf-ea72-45d5-8b34-e2b0e155e528.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 739.124518] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60f54467-ca5d-440a-a456-fc54b722c44b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.147564] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 739.147564] env[69994]: value = "task-2925274" [ 739.147564] env[69994]: _type = "Task" [ 739.147564] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.159649] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925274, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.287836] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.077s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.290198] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.190s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.291150] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.291150] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 739.291150] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.597s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.292449] env[69994]: INFO nova.compute.claims [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 739.295860] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a483d280-b0ee-4455-8d4d-96089e3d68f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.299728] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70d7ea69-2b6d-43c2-9473-d1a1f69760b1 tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "2ee43622-74f3-4bf6-88e3-cba4ff7ce33d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.403s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.308711] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b934f8-ece9-4691-b9f5-c912d28c13ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.324231] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef109bd-3023-4b84-bfcb-bd2ff7e8033e {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.330478] env[69994]: INFO nova.scheduler.client.report [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Deleted allocations for instance e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4 [ 739.339703] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7df17c-5d9f-4e5b-89a4-9c6c6b16cd40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.377717] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179926MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 739.377717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.378352] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.378696] env[69994]: DEBUG nova.compute.manager [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Instance network_info: |[{"id": "8b639504-b3a0-4772-9a06-af40fbe1667e", "address": "fa:16:3e:d3:a3:94", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b639504-b3", "ovs_interfaceid": "8b639504-b3a0-4772-9a06-af40fbe1667e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 739.379606] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:a3:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b639504-b3a0-4772-9a06-af40fbe1667e', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 739.387718] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Creating folder: Project (66d57a69e0924b9abc2cc4e67fc8173c). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 739.388568] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6541f00-68d1-454d-b255-665c26154b9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.403465] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Created folder: Project (66d57a69e0924b9abc2cc4e67fc8173c) in parent group-v587342. [ 739.403689] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Creating folder: Instances. Parent ref: group-v587441. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 739.404699] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45e1ebc7-d36b-4ddc-826d-9d0fcb379223 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.418042] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Created folder: Instances in parent group-v587441. [ 739.418218] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 739.419032] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 739.419032] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5ea1c69-b139-49b7-bbcf-14c66203c221 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.442200] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 739.442200] env[69994]: value = "task-2925277" [ 739.442200] env[69994]: _type = "Task" [ 739.442200] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.450482] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925277, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.633111] env[69994]: DEBUG nova.compute.manager [req-18496af9-0543-4cb3-8d57-15a3cc0c19b4 req-d5313b62-ccdd-412a-81a6-e3d2318fc287 service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Received event network-changed-8b639504-b3a0-4772-9a06-af40fbe1667e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.633440] env[69994]: DEBUG nova.compute.manager [req-18496af9-0543-4cb3-8d57-15a3cc0c19b4 req-d5313b62-ccdd-412a-81a6-e3d2318fc287 service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Refreshing instance network info cache due to event network-changed-8b639504-b3a0-4772-9a06-af40fbe1667e. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 739.633684] env[69994]: DEBUG oslo_concurrency.lockutils [req-18496af9-0543-4cb3-8d57-15a3cc0c19b4 req-d5313b62-ccdd-412a-81a6-e3d2318fc287 service nova] Acquiring lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.633825] env[69994]: DEBUG oslo_concurrency.lockutils [req-18496af9-0543-4cb3-8d57-15a3cc0c19b4 req-d5313b62-ccdd-412a-81a6-e3d2318fc287 service nova] Acquired lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.634229] env[69994]: DEBUG nova.network.neutron [req-18496af9-0543-4cb3-8d57-15a3cc0c19b4 req-d5313b62-ccdd-412a-81a6-e3d2318fc287 service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Refreshing network info cache for port 8b639504-b3a0-4772-9a06-af40fbe1667e {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 739.658963] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925274, 'name': ReconfigVM_Task, 'duration_secs': 0.326188} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.659597] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 1693ccdf-ea72-45d5-8b34-e2b0e155e528/1693ccdf-ea72-45d5-8b34-e2b0e155e528.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 739.662073] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2926b9a3-5257-4239-9b66-c22a89304ad2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.670277] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 739.670277] env[69994]: value = "task-2925278" [ 739.670277] env[69994]: _type = "Task" [ 739.670277] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.682826] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925278, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.802861] env[69994]: DEBUG nova.compute.manager [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 739.843354] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ddc1d8e-dc17-444d-936c-c4ee9c72a48c tempest-ServerAddressesTestJSON-10777695 tempest-ServerAddressesTestJSON-10777695-project-member] Lock "e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.524s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.953612] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925277, 'name': CreateVM_Task, 'duration_secs': 0.397214} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.953968] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 739.954600] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.955073] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.955929] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 739.956206] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d79bd7a9-2bb4-422a-a472-36b8e8db9796 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.961557] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 739.961557] env[69994]: value = 
"session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52923022-2c86-07cb-8a3c-c4e65f9cae18" [ 739.961557] env[69994]: _type = "Task" [ 739.961557] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.973017] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52923022-2c86-07cb-8a3c-c4e65f9cae18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.096740] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquiring lock "2ee43622-74f3-4bf6-88e3-cba4ff7ce33d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.097052] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "2ee43622-74f3-4bf6-88e3-cba4ff7ce33d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.098479] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquiring lock "2ee43622-74f3-4bf6-88e3-cba4ff7ce33d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.098479] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "2ee43622-74f3-4bf6-88e3-cba4ff7ce33d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.098479] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "2ee43622-74f3-4bf6-88e3-cba4ff7ce33d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.100079] env[69994]: INFO nova.compute.manager [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Terminating instance [ 740.182917] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925278, 'name': 
Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.338996] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.390365] env[69994]: DEBUG nova.network.neutron [req-18496af9-0543-4cb3-8d57-15a3cc0c19b4 req-d5313b62-ccdd-412a-81a6-e3d2318fc287 service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Updated VIF entry in instance network info cache for port 8b639504-b3a0-4772-9a06-af40fbe1667e. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 740.390365] env[69994]: DEBUG nova.network.neutron [req-18496af9-0543-4cb3-8d57-15a3cc0c19b4 req-d5313b62-ccdd-412a-81a6-e3d2318fc287 service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Updating instance_info_cache with network_info: [{"id": "8b639504-b3a0-4772-9a06-af40fbe1667e", "address": "fa:16:3e:d3:a3:94", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b639504-b3", "ovs_interfaceid": "8b639504-b3a0-4772-9a06-af40fbe1667e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.475573] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52923022-2c86-07cb-8a3c-c4e65f9cae18, 'name': SearchDatastore_Task, 'duration_secs': 0.011309} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.475891] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.476151] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 740.476389] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.477190] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.477190] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 740.477190] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67b2abb6-d491-4e5d-a9d2-01ea82848e05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.489808] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 740.490014] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 740.493576] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1a46449-a496-468d-8157-83ce9cb9ae08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.501090] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 740.501090] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523e4809-baa2-e299-c6b0-57264045dabc" [ 740.501090] env[69994]: _type = "Task" [ 740.501090] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.509755] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523e4809-baa2-e299-c6b0-57264045dabc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.604273] env[69994]: DEBUG nova.compute.manager [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 740.604509] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 740.605447] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a402753c-1662-40b7-a8d0-db531f8bb8d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.616534] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 740.616813] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-def552cd-4f5c-4f79-a5ab-33bf00378214 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.627079] env[69994]: DEBUG oslo_vmware.api [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 740.627079] env[69994]: value = "task-2925279" [ 740.627079] env[69994]: _type = "Task" [ 740.627079] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.640175] env[69994]: DEBUG oslo_vmware.api [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925279, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.682024] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925278, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.783396] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c528a1d7-a5b0-427e-846e-fe491cf1ae2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.792171] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b76d193-113e-4c46-833d-1e10461cf690 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.826476] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ce3541-1402-4bb5-92a4-966856df860a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.837112] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2af8a2d-b3d7-4048-9973-86317f9406e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.852631] env[69994]: DEBUG nova.compute.provider_tree [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.892605] env[69994]: DEBUG oslo_concurrency.lockutils [req-18496af9-0543-4cb3-8d57-15a3cc0c19b4 req-d5313b62-ccdd-412a-81a6-e3d2318fc287 service nova] Releasing lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.014540] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523e4809-baa2-e299-c6b0-57264045dabc, 'name': SearchDatastore_Task, 'duration_secs': 0.010158} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.015508] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-075fb584-a8e0-438f-bf5b-edfe1878ec50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.021589] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 741.021589] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523f6c10-9fcf-a5b0-8241-20ea5b75d272" [ 741.021589] env[69994]: _type = "Task" [ 741.021589] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.031080] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523f6c10-9fcf-a5b0-8241-20ea5b75d272, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.139627] env[69994]: DEBUG oslo_vmware.api [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925279, 'name': PowerOffVM_Task, 'duration_secs': 0.376728} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.139720] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 741.140099] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 741.140099] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5aaf2615-6740-413f-8db5-ee27b75a5470 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.186942] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925278, 'name': Rename_Task, 'duration_secs': 1.202277} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.186942] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 741.186942] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce93f76d-0690-416c-a657-41c36ee1075d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.195420] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 741.195420] env[69994]: value = "task-2925281" [ 741.195420] env[69994]: _type = "Task" [ 741.195420] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.217324] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925281, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.219384] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 741.219384] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 741.219384] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Deleting the datastore file [datastore1] 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 741.219521] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f11c093f-c9c3-4e7f-9898-173f94273396 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.230051] env[69994]: DEBUG oslo_vmware.api [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for the task: (returnval){ [ 741.230051] env[69994]: value = "task-2925282" [ 741.230051] env[69994]: _type = "Task" [ 741.230051] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.241435] env[69994]: DEBUG oslo_vmware.api [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925282, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.356858] env[69994]: DEBUG nova.scheduler.client.report [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 741.537269] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523f6c10-9fcf-a5b0-8241-20ea5b75d272, 'name': SearchDatastore_Task, 'duration_secs': 0.010826} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.537269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.537269] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e46b8a11-650a-4e34-bc4a-e1c1b2515e76/e46b8a11-650a-4e34-bc4a-e1c1b2515e76.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 741.537269] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60d7804a-52bd-4184-85e3-f2e9ff2c3718 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.545021] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 741.545021] env[69994]: value = "task-2925283" [ 741.545021] env[69994]: _type = "Task" [ 741.545021] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.552518] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925283, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.704671] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925281, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.741107] env[69994]: DEBUG oslo_vmware.api [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Task: {'id': task-2925282, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389456} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.741492] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 741.741766] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 741.742044] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 741.742304] env[69994]: INFO nova.compute.manager [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 741.742650] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 741.742941] env[69994]: DEBUG nova.compute.manager [-] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 741.743118] env[69994]: DEBUG nova.network.neutron [-] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 741.862190] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.571s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.862760] env[69994]: DEBUG nova.compute.manager [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 741.866395] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.548s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.868214] env[69994]: INFO nova.compute.claims [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.060791] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925283, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.206178] env[69994]: DEBUG oslo_vmware.api [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925281, 'name': PowerOnVM_Task, 'duration_secs': 0.551978} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.206178] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 742.206403] env[69994]: DEBUG nova.compute.manager [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 742.209173] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3013a6b1-f0f0-45f7-bd15-0cb240f65e0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.295026] env[69994]: DEBUG nova.compute.manager [req-38a363ba-3d22-4097-a2a0-3cec8ad91833 req-a7ff45a2-3a0c-47f2-ac4b-36e34a94cd4e service nova] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Received event network-vif-deleted-03117278-4ae8-434d-8093-80636a495ec6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 742.295026] env[69994]: INFO nova.compute.manager [req-38a363ba-3d22-4097-a2a0-3cec8ad91833 req-a7ff45a2-3a0c-47f2-ac4b-36e34a94cd4e service nova] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Neutron deleted interface 03117278-4ae8-434d-8093-80636a495ec6; detaching it from the instance and deleting it from the info cache [ 742.295026] env[69994]: DEBUG nova.network.neutron [req-38a363ba-3d22-4097-a2a0-3cec8ad91833 req-a7ff45a2-3a0c-47f2-ac4b-36e34a94cd4e service nova] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.376982] env[69994]: DEBUG nova.compute.utils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 742.378378] env[69994]: DEBUG nova.compute.manager [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 742.378552] env[69994]: DEBUG nova.network.neutron [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 742.454503] env[69994]: DEBUG nova.policy [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7be902c21aba40e1ac159ffa787eea04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d72179a46b64984b9ef219161bfcd76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 742.554951] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925283, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530637} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.555158] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e46b8a11-650a-4e34-bc4a-e1c1b2515e76/e46b8a11-650a-4e34-bc4a-e1c1b2515e76.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 742.555369] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 742.555651] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01011d5c-2d92-4ba0-b891-f77e27085bf9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.563343] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 742.563343] env[69994]: value = "task-2925284" [ 742.563343] env[69994]: _type = "Task" [ 742.563343] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.572396] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925284, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.671333] env[69994]: DEBUG nova.network.neutron [-] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.729241] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.797523] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e2db995-dd80-4804-bdaa-a660d0bc0365 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.804171] env[69994]: DEBUG nova.network.neutron [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Successfully created port: da926370-b1f8-440c-a006-0135408e8d6f {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 742.814248] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600dbeec-ed79-461b-8830-df0465e292e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.856424] env[69994]: DEBUG nova.compute.manager [req-38a363ba-3d22-4097-a2a0-3cec8ad91833 req-a7ff45a2-3a0c-47f2-ac4b-36e34a94cd4e service nova] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Detach interface failed, port_id=03117278-4ae8-434d-8093-80636a495ec6, reason: Instance 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 742.882817] env[69994]: DEBUG nova.compute.manager [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 743.080248] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.194501} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.080492] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 743.081377] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538ecae6-0dd5-441a-9c86-c2bab522281e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.114928] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] e46b8a11-650a-4e34-bc4a-e1c1b2515e76/e46b8a11-650a-4e34-bc4a-e1c1b2515e76.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 743.118400] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87602446-3714-4e55-9155-649ef152d468 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.142892] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 743.142892] env[69994]: value = "task-2925285" [ 743.142892] env[69994]: _type = "Task" [ 743.142892] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.158625] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925285, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.177341] env[69994]: INFO nova.compute.manager [-] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Took 1.43 seconds to deallocate network for instance. 
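The DeleteDatastoreFile/CopyVirtualDisk/ExtendVirtualDisk/ReconfigVM entries above all follow the same pattern: wait_for_task issues the vCenter task and _poll_task logs a "progress is N%" line on each poll until the task reports success. A minimal sketch of that polling loop, assuming a hypothetical `get_task_info()` callable rather than the real oslo.vmware session internals:

```python
import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a VMware-style task until it succeeds or fails.

    ``get_task_info`` is a hypothetical callable returning an object with
    ``state`` ('running', 'success' or 'error'), ``progress`` (percent) and
    ``error`` attributes; the real driver goes through oslo.vmware instead.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise TaskFailed(info.error)
        # Corresponds to the periodic "progress is N%" lines in the log.
        print(f"progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")
```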
[ 743.486330] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad87ea39-434d-4ae8-afeb-702fc5eb2836 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.493245] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684cf51c-d760-4133-8092-ffdd179ddac8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.535559] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a665c043-53ce-4057-8947-fba0e1e50097 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.545128] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d18077-3726-4a4c-9e85-65c9b9b8f786 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.561610] env[69994]: DEBUG nova.compute.provider_tree [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.654349] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925285, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.686804] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.855434] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquiring lock "1693ccdf-ea72-45d5-8b34-e2b0e155e528" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.855747] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Lock "1693ccdf-ea72-45d5-8b34-e2b0e155e528" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.856289] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquiring lock "1693ccdf-ea72-45d5-8b34-e2b0e155e528-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.856289] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Lock "1693ccdf-ea72-45d5-8b34-e2b0e155e528-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.856464] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Lock "1693ccdf-ea72-45d5-8b34-e2b0e155e528-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.858585] env[69994]: INFO nova.compute.manager [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Terminating instance [ 743.896428] env[69994]: DEBUG nova.compute.manager [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 743.929306] env[69994]: DEBUG nova.virt.hardware [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 743.929631] env[69994]: DEBUG nova.virt.hardware [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 743.929840] env[69994]: DEBUG nova.virt.hardware [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 743.930089] env[69994]: DEBUG nova.virt.hardware [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 
tempest-AttachVolumeNegativeTest-1428794158-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 743.930808] env[69994]: DEBUG nova.virt.hardware [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 743.931036] env[69994]: DEBUG nova.virt.hardware [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 743.931299] env[69994]: DEBUG nova.virt.hardware [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 743.931503] env[69994]: DEBUG nova.virt.hardware [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 743.931715] env[69994]: DEBUG nova.virt.hardware [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 743.931929] env[69994]: DEBUG nova.virt.hardware [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 743.932173] env[69994]: DEBUG nova.virt.hardware [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 743.933157] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d938963a-8c23-498d-8953-59c0741430d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.943894] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a347506-db99-479c-af85-08ccf173f840 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.068280] env[69994]: DEBUG nova.scheduler.client.report [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 744.156694] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925285, 'name': ReconfigVM_Task, 'duration_secs': 0.687753} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.156968] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Reconfigured VM instance instance-0000001f to attach disk [datastore1] e46b8a11-650a-4e34-bc4a-e1c1b2515e76/e46b8a11-650a-4e34-bc4a-e1c1b2515e76.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 744.157885] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-099a8700-ade1-4817-b7ed-98ab4610ecd5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.167088] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 744.167088] env[69994]: value = "task-2925286" [ 744.167088] env[69994]: _type = "Task" [ 744.167088] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.179409] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925286, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.360933] env[69994]: DEBUG nova.compute.manager [req-c60b3295-ba1a-4efb-8406-4238b4ad84e8 req-e1f59661-5010-438b-87a8-5a3b47a08814 service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Received event network-vif-plugged-da926370-b1f8-440c-a006-0135408e8d6f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 744.361300] env[69994]: DEBUG oslo_concurrency.lockutils [req-c60b3295-ba1a-4efb-8406-4238b4ad84e8 req-e1f59661-5010-438b-87a8-5a3b47a08814 service nova] Acquiring lock "ab320e59-febb-4f8f-9bc4-74227d29c752-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.361431] env[69994]: DEBUG oslo_concurrency.lockutils [req-c60b3295-ba1a-4efb-8406-4238b4ad84e8 req-e1f59661-5010-438b-87a8-5a3b47a08814 service nova] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.361714] env[69994]: DEBUG oslo_concurrency.lockutils [req-c60b3295-ba1a-4efb-8406-4238b4ad84e8 req-e1f59661-5010-438b-87a8-5a3b47a08814 service nova] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.361876] env[69994]: DEBUG nova.compute.manager [req-c60b3295-ba1a-4efb-8406-4238b4ad84e8 req-e1f59661-5010-438b-87a8-5a3b47a08814 service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] No waiting events found dispatching network-vif-plugged-da926370-b1f8-440c-a006-0135408e8d6f {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 744.362595] env[69994]: WARNING nova.compute.manager [req-c60b3295-ba1a-4efb-8406-4238b4ad84e8 req-e1f59661-5010-438b-87a8-5a3b47a08814 service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Received unexpected event network-vif-plugged-da926370-b1f8-440c-a006-0135408e8d6f for instance with vm_state building and task_state spawning. 
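The "Received unexpected event network-vif-plugged-…" warning above comes from the external-event path: Neutron notifies Nova that the port is up, the compute manager takes the per-instance "-events" lock and tries to pop a registered waiter, and warns when nothing was waiting because the instance is still building. A simplified sketch of that registry, using plain threading primitives instead of Nova's actual eventlet-based implementation:

```python
import threading
from collections import defaultdict


class InstanceEvents:
    """Simplified stand-in for the waiter registry behind these log lines."""

    def __init__(self):
        self._lock = threading.Lock()
        # instance_uuid -> {event_key: threading.Event}
        self._events = defaultdict(dict)

    def prepare_for_instance_event(self, instance_uuid, event_key):
        # Called before an operation that expects e.g. 'network-vif-plugged-<port>'.
        with self._lock:
            ev = threading.Event()
            self._events[instance_uuid][event_key] = ev
            return ev

    def pop_instance_event(self, instance_uuid, event_key):
        # None means nobody registered a waiter, which is what produces the
        # "Received unexpected event" warning during spawn.
        with self._lock:
            return self._events.get(instance_uuid, {}).pop(event_key, None)


def handle_external_event(events, instance_uuid, event_key):
    waiter = events.pop_instance_event(instance_uuid, event_key)
    if waiter is None:
        print(f"WARNING: unexpected event {event_key} for instance {instance_uuid}")
    else:
        waiter.set()
```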
[ 744.362943] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquiring lock "refresh_cache-1693ccdf-ea72-45d5-8b34-e2b0e155e528" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.363358] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquired lock "refresh_cache-1693ccdf-ea72-45d5-8b34-e2b0e155e528" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.363358] env[69994]: DEBUG nova.network.neutron [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 744.471637] env[69994]: DEBUG nova.network.neutron [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Successfully updated port: da926370-b1f8-440c-a006-0135408e8d6f {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 744.574174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.707s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.574702] env[69994]: DEBUG nova.compute.manager [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 744.577942] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.075s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.579515] env[69994]: INFO nova.compute.claims [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.677566] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925286, 'name': Rename_Task, 'duration_secs': 0.246808} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.677868] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 744.678143] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-146a1af5-033c-459a-ad60-9284635f7e24 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.686279] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 744.686279] env[69994]: value = "task-2925287" [ 744.686279] env[69994]: _type = "Task" [ 744.686279] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.695019] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925287, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.887345] env[69994]: DEBUG nova.network.neutron [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.944535] env[69994]: DEBUG nova.network.neutron [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.977929] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "refresh_cache-ab320e59-febb-4f8f-9bc4-74227d29c752" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.978038] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired lock "refresh_cache-ab320e59-febb-4f8f-9bc4-74227d29c752" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.978194] env[69994]: DEBUG nova.network.neutron [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.079286] env[69994]: DEBUG nova.compute.utils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 745.081054] env[69994]: DEBUG nova.compute.manager [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 745.081054] env[69994]: DEBUG nova.network.neutron [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 745.131436] env[69994]: DEBUG nova.policy [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46492cc97ceb4e6cb14697b3bd6493d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fe28fa02f104475980a34f35dc815a2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 745.196909] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925287, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.407155] env[69994]: DEBUG nova.network.neutron [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Successfully created port: 77322171-a52e-49ee-a04c-5aecaebff021 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.447289] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Releasing lock "refresh_cache-1693ccdf-ea72-45d5-8b34-e2b0e155e528" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 745.447751] env[69994]: DEBUG nova.compute.manager [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 745.448248] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 745.448920] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5660ce4-6725-45c5-a711-145274677e26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.457964] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 745.458258] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-181bbea0-4b55-4d40-ae1a-e2a3eaa5494c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.466561] env[69994]: DEBUG oslo_vmware.api [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 745.466561] env[69994]: value = "task-2925288" [ 745.466561] env[69994]: _type = "Task" [ 745.466561] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.475687] env[69994]: DEBUG oslo_vmware.api [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925288, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.513650] env[69994]: DEBUG nova.network.neutron [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.584557] env[69994]: DEBUG nova.compute.manager [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 745.703469] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925287, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.716774] env[69994]: DEBUG nova.network.neutron [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Updating instance_info_cache with network_info: [{"id": "da926370-b1f8-440c-a006-0135408e8d6f", "address": "fa:16:3e:bf:e4:7d", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda926370-b1", "ovs_interfaceid": "da926370-b1f8-440c-a006-0135408e8d6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.976275] env[69994]: DEBUG oslo_vmware.api [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925288, 'name': PowerOffVM_Task, 'duration_secs': 0.133248} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.976568] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 745.976710] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 745.976957] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7447c1e1-3215-408a-8a3a-d2f566081ebb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.006745] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 746.006745] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 746.006745] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Deleting the datastore file [datastore1] 1693ccdf-ea72-45d5-8b34-e2b0e155e528 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 746.007132] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4adfbe31-37b9-4328-b4e4-9ae069981f1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.014635] env[69994]: DEBUG oslo_vmware.api [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for the task: (returnval){ [ 746.014635] env[69994]: value = "task-2925290" [ 746.014635] env[69994]: _type = "Task" [ 746.014635] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.025880] env[69994]: DEBUG oslo_vmware.api [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925290, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.039355] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1404af66-d3bd-49e2-833e-b9736b067090 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.047111] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d633e29a-5976-4066-98db-38bd6e065c1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.077677] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904f8efc-e567-4572-8aa8-c3641ea7fb2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.086135] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3507e4eb-1967-4814-945d-b5d223aecc9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.103227] env[69994]: DEBUG nova.compute.provider_tree [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.197742] env[69994]: DEBUG oslo_vmware.api [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925287, 'name': PowerOnVM_Task, 'duration_secs': 1.124673} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.198037] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 746.198241] env[69994]: INFO nova.compute.manager [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Took 9.12 seconds to spawn the instance on the hypervisor. 
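The spawn of instance e46b8a11-650a-4e34-bc4a-e1c1b2515e76 traced above chains several vCenter tasks: copy the cached image vmdk into the instance directory, extend it to the root disk size, reconfigure the VM to attach the disk, rename the VM, then power it on. An illustrative outline of that sequence; the `session.call`/`session.wait` helpers and parameter names are assumptions for readability, not the actual nova.virt.vmwareapi API:

```python
def spawn_from_image_cache(session, datastore, image_id, instance_uuid, root_gb):
    """Outline of the disk and VM steps visible in the log for this spawn."""
    cached = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    target = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    # 1. Copy the cached image disk into the instance directory (CopyVirtualDisk_Task).
    session.wait(session.call("CopyVirtualDisk_Task", source=cached, dest=target))
    # 2. Grow the copy to the flavor's root disk size (ExtendVirtualDisk_Task);
    #    capacity is expressed in KB, so 1 GB -> 1024 * 1024 KB.
    session.wait(session.call("ExtendVirtualDisk_Task", name=target,
                              new_capacity_kb=root_gb * 1024 * 1024))
    # 3. Attach the disk to the VM config, rename the VM, and power it on.
    session.wait(session.call("ReconfigVM_Task", vm=instance_uuid, disk=target))
    session.wait(session.call("Rename_Task", vm=instance_uuid, new_name=instance_uuid))
    session.wait(session.call("PowerOnVM_Task", vm=instance_uuid))
```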
[ 746.198419] env[69994]: DEBUG nova.compute.manager [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 746.199169] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85dc43eb-732c-4730-8857-0386b4c1e12b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.220524] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Releasing lock "refresh_cache-ab320e59-febb-4f8f-9bc4-74227d29c752" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.220806] env[69994]: DEBUG nova.compute.manager [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Instance network_info: |[{"id": "da926370-b1f8-440c-a006-0135408e8d6f", "address": "fa:16:3e:bf:e4:7d", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda926370-b1", "ovs_interfaceid": "da926370-b1f8-440c-a006-0135408e8d6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 746.222188] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:e4:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53915f38-d7a0-42ec-8b30-1eacfb2cc379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da926370-b1f8-440c-a006-0135408e8d6f', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 746.228848] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Creating folder: Project (3d72179a46b64984b9ef219161bfcd76). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 746.229859] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c6852df-2c33-435b-ae05-acf859f14baa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.241455] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Created folder: Project (3d72179a46b64984b9ef219161bfcd76) in parent group-v587342. [ 746.241653] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Creating folder: Instances. Parent ref: group-v587444. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 746.242171] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-143cfb78-d118-4855-bca2-b519846c12e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.251472] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Created folder: Instances in parent group-v587444. [ 746.251702] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 746.251888] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 746.252104] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-26530815-1527-4922-91a2-7d8de6f5c455 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.270672] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 746.270672] env[69994]: value = "task-2925293" [ 746.270672] env[69994]: _type = "Task" [ 746.270672] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.278146] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925293, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.385986] env[69994]: DEBUG nova.compute.manager [req-b375c41a-1227-4772-8398-680d1763764a req-d65429b2-a188-4ed8-883c-aea57bf41732 service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Received event network-changed-da926370-b1f8-440c-a006-0135408e8d6f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 746.386218] env[69994]: DEBUG nova.compute.manager [req-b375c41a-1227-4772-8398-680d1763764a req-d65429b2-a188-4ed8-883c-aea57bf41732 service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Refreshing instance network info cache due to event network-changed-da926370-b1f8-440c-a006-0135408e8d6f. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 746.386436] env[69994]: DEBUG oslo_concurrency.lockutils [req-b375c41a-1227-4772-8398-680d1763764a req-d65429b2-a188-4ed8-883c-aea57bf41732 service nova] Acquiring lock "refresh_cache-ab320e59-febb-4f8f-9bc4-74227d29c752" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.386577] env[69994]: DEBUG oslo_concurrency.lockutils [req-b375c41a-1227-4772-8398-680d1763764a req-d65429b2-a188-4ed8-883c-aea57bf41732 service nova] Acquired lock "refresh_cache-ab320e59-febb-4f8f-9bc4-74227d29c752" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.386733] env[69994]: DEBUG nova.network.neutron [req-b375c41a-1227-4772-8398-680d1763764a req-d65429b2-a188-4ed8-883c-aea57bf41732 service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Refreshing network info cache for port da926370-b1f8-440c-a006-0135408e8d6f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 746.527458] env[69994]: DEBUG oslo_vmware.api [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Task: {'id': task-2925290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109457} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.527795] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 746.528025] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 746.528266] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 746.528473] env[69994]: INFO nova.compute.manager [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Took 1.08 seconds to destroy the instance on the hypervisor. [ 746.528746] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 746.529050] env[69994]: DEBUG nova.compute.manager [-] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 746.529229] env[69994]: DEBUG nova.network.neutron [-] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 746.549984] env[69994]: DEBUG nova.network.neutron [-] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.604741] env[69994]: DEBUG nova.compute.manager [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 746.608050] env[69994]: DEBUG nova.scheduler.client.report [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 746.632817] env[69994]: DEBUG nova.virt.hardware [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 746.633114] env[69994]: DEBUG nova.virt.hardware [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 746.633278] env[69994]: DEBUG nova.virt.hardware [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 746.633459] env[69994]: DEBUG nova.virt.hardware [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 746.633603] env[69994]: DEBUG nova.virt.hardware [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 746.633744] env[69994]: DEBUG nova.virt.hardware [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 746.633947] env[69994]: DEBUG nova.virt.hardware [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 746.634117] env[69994]: DEBUG nova.virt.hardware [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 746.634280] env[69994]: DEBUG nova.virt.hardware [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 746.634441] env[69994]: DEBUG nova.virt.hardware [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 746.634611] env[69994]: DEBUG nova.virt.hardware [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 746.635707] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc7d4ae-7dc5-4626-afea-77d348810063 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.643792] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66eb8485-3452-431f-a9b1-81c79974a24f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.716010] env[69994]: INFO nova.compute.manager [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Took 44.94 seconds to build instance. [ 746.781755] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925293, 'name': CreateVM_Task, 'duration_secs': 0.354486} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.781943] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 746.782636] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.782829] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.783156] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 746.783435] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f056310e-836c-4a0d-b7c7-aced75308f3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.788731] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 746.788731] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226eaa9-845c-6b2a-33e5-850db33c4960" [ 746.788731] env[69994]: _type = "Task" [ 746.788731] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.798400] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226eaa9-845c-6b2a-33e5-850db33c4960, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.052519] env[69994]: DEBUG nova.network.neutron [-] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.063213] env[69994]: DEBUG nova.network.neutron [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Successfully updated port: 77322171-a52e-49ee-a04c-5aecaebff021 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 747.113669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.536s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.114212] env[69994]: DEBUG nova.compute.manager [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 747.116914] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.059s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.118283] env[69994]: INFO nova.compute.claims [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 747.217279] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c007323d-6627-47d7-bc62-ea586d8bf17a tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.300s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.300447] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226eaa9-845c-6b2a-33e5-850db33c4960, 'name': SearchDatastore_Task, 'duration_secs': 0.010796} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.300773] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.301067] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 747.301457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.301538] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.301690] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 747.301960] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c67615e8-20bd-4695-84f1-8340c2e3a335 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.312186] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 747.312378] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 747.313159] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bce3347d-5661-4de6-975e-7b63e74b48c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.318914] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 747.318914] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5200823c-a7aa-5c01-b23a-fcd9b3c97e63" [ 747.318914] env[69994]: _type = "Task" [ 747.318914] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.327296] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5200823c-a7aa-5c01-b23a-fcd9b3c97e63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.484311] env[69994]: DEBUG nova.network.neutron [req-b375c41a-1227-4772-8398-680d1763764a req-d65429b2-a188-4ed8-883c-aea57bf41732 service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Updated VIF entry in instance network info cache for port da926370-b1f8-440c-a006-0135408e8d6f. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 747.484499] env[69994]: DEBUG nova.network.neutron [req-b375c41a-1227-4772-8398-680d1763764a req-d65429b2-a188-4ed8-883c-aea57bf41732 service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Updating instance_info_cache with network_info: [{"id": "da926370-b1f8-440c-a006-0135408e8d6f", "address": "fa:16:3e:bf:e4:7d", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda926370-b1", "ovs_interfaceid": "da926370-b1f8-440c-a006-0135408e8d6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.556975] env[69994]: INFO nova.compute.manager [-] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Took 1.03 seconds to deallocate network for instance. 
[ 747.567488] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "refresh_cache-a589ddb9-947b-4ff4-94f6-1fab4bdb874b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.567694] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquired lock "refresh_cache-a589ddb9-947b-4ff4-94f6-1fab4bdb874b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.567813] env[69994]: DEBUG nova.network.neutron [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 747.629860] env[69994]: DEBUG nova.compute.utils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 747.631256] env[69994]: DEBUG nova.compute.manager [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 747.631419] env[69994]: DEBUG nova.network.neutron [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 747.705765] env[69994]: DEBUG nova.policy [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46492cc97ceb4e6cb14697b3bd6493d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fe28fa02f104475980a34f35dc815a2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 747.719291] env[69994]: DEBUG nova.compute.manager [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 747.829845] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5200823c-a7aa-5c01-b23a-fcd9b3c97e63, 'name': SearchDatastore_Task, 'duration_secs': 0.010779} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.830673] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c72a8bcf-03e4-42c1-a207-5cb0121336d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.836478] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 747.836478] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b3284a-8260-4335-4a30-ef528e76d63f" [ 747.836478] env[69994]: _type = "Task" [ 747.836478] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.848022] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b3284a-8260-4335-4a30-ef528e76d63f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.978698] env[69994]: DEBUG nova.compute.manager [req-c55c4068-ca3d-46f0-b36a-1e58110c63d3 req-cb635b8e-8eb1-42ae-8434-1e79b60442bb service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Received event network-changed-8b639504-b3a0-4772-9a06-af40fbe1667e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 747.978898] env[69994]: DEBUG nova.compute.manager [req-c55c4068-ca3d-46f0-b36a-1e58110c63d3 req-cb635b8e-8eb1-42ae-8434-1e79b60442bb service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Refreshing instance network info cache due to event network-changed-8b639504-b3a0-4772-9a06-af40fbe1667e. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 747.979138] env[69994]: DEBUG oslo_concurrency.lockutils [req-c55c4068-ca3d-46f0-b36a-1e58110c63d3 req-cb635b8e-8eb1-42ae-8434-1e79b60442bb service nova] Acquiring lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.980224] env[69994]: DEBUG oslo_concurrency.lockutils [req-c55c4068-ca3d-46f0-b36a-1e58110c63d3 req-cb635b8e-8eb1-42ae-8434-1e79b60442bb service nova] Acquired lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.980224] env[69994]: DEBUG nova.network.neutron [req-c55c4068-ca3d-46f0-b36a-1e58110c63d3 req-cb635b8e-8eb1-42ae-8434-1e79b60442bb service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Refreshing network info cache for port 8b639504-b3a0-4772-9a06-af40fbe1667e {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 747.987876] env[69994]: DEBUG oslo_concurrency.lockutils [req-b375c41a-1227-4772-8398-680d1763764a req-d65429b2-a188-4ed8-883c-aea57bf41732 service nova] Releasing lock "refresh_cache-ab320e59-febb-4f8f-9bc4-74227d29c752" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.061773] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.098145] env[69994]: DEBUG nova.network.neutron [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.136327] env[69994]: DEBUG nova.compute.manager [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 748.144201] env[69994]: DEBUG nova.network.neutron [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Successfully created port: ef5413fb-71f6-42a8-a79c-ae48503f6015 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.240678] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.252549] env[69994]: DEBUG nova.network.neutron [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Updating instance_info_cache with network_info: [{"id": "77322171-a52e-49ee-a04c-5aecaebff021", "address": "fa:16:3e:4c:a0:a8", "network": {"id": "4a44e17e-7aac-4a89-abb5-0038f92e41fb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-226468376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fe28fa02f104475980a34f35dc815a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77322171-a5", "ovs_interfaceid": "77322171-a52e-49ee-a04c-5aecaebff021", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.351136] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b3284a-8260-4335-4a30-ef528e76d63f, 'name': SearchDatastore_Task, 'duration_secs': 0.012059} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.351416] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.351717] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] ab320e59-febb-4f8f-9bc4-74227d29c752/ab320e59-febb-4f8f-9bc4-74227d29c752.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 748.351937] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e84cf13d-a7b6-45d4-aaaa-1a32ba022a8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.363746] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 748.363746] env[69994]: value = "task-2925294" [ 748.363746] env[69994]: _type = "Task" [ 748.363746] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.377547] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925294, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.428760] env[69994]: DEBUG nova.compute.manager [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Received event network-vif-plugged-77322171-a52e-49ee-a04c-5aecaebff021 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 748.429184] env[69994]: DEBUG oslo_concurrency.lockutils [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] Acquiring lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.429405] env[69994]: DEBUG oslo_concurrency.lockutils [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] Lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.429576] env[69994]: DEBUG oslo_concurrency.lockutils [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] Lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.429873] env[69994]: DEBUG nova.compute.manager [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] No waiting events found dispatching network-vif-plugged-77322171-a52e-49ee-a04c-5aecaebff021 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 748.429990] env[69994]: WARNING nova.compute.manager [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Received unexpected event network-vif-plugged-77322171-a52e-49ee-a04c-5aecaebff021 for instance with vm_state building and task_state spawning. [ 748.430115] env[69994]: DEBUG nova.compute.manager [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Received event network-changed-77322171-a52e-49ee-a04c-5aecaebff021 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 748.430276] env[69994]: DEBUG nova.compute.manager [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Refreshing instance network info cache due to event network-changed-77322171-a52e-49ee-a04c-5aecaebff021. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 748.430450] env[69994]: DEBUG oslo_concurrency.lockutils [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] Acquiring lock "refresh_cache-a589ddb9-947b-4ff4-94f6-1fab4bdb874b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.618357] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442512e0-e25b-497e-bc11-e7b06f6896cc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.631224] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53830638-0771-4e5a-a2f9-7aa60dc9ffd7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.674389] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5f72f7-6e32-45c0-9fbc-85df0f131114 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.685281] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d18d183-8bf3-4a5f-8e97-f0f794e0fce2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.702693] env[69994]: DEBUG nova.compute.provider_tree [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.755597] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Releasing lock "refresh_cache-a589ddb9-947b-4ff4-94f6-1fab4bdb874b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.756016] env[69994]: DEBUG nova.compute.manager [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Instance network_info: |[{"id": "77322171-a52e-49ee-a04c-5aecaebff021", "address": "fa:16:3e:4c:a0:a8", "network": {"id": "4a44e17e-7aac-4a89-abb5-0038f92e41fb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-226468376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fe28fa02f104475980a34f35dc815a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77322171-a5", "ovs_interfaceid": 
"77322171-a52e-49ee-a04c-5aecaebff021", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 748.756353] env[69994]: DEBUG oslo_concurrency.lockutils [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] Acquired lock "refresh_cache-a589ddb9-947b-4ff4-94f6-1fab4bdb874b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.756619] env[69994]: DEBUG nova.network.neutron [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Refreshing network info cache for port 77322171-a52e-49ee-a04c-5aecaebff021 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 748.758783] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:a0:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f267bcdd-0daa-4337-9709-5fc060c267d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '77322171-a52e-49ee-a04c-5aecaebff021', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.772532] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Creating folder: Project (9fe28fa02f104475980a34f35dc815a2). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 748.776889] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca45a025-b155-41f4-a079-341143b2ca06 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.796533] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Created folder: Project (9fe28fa02f104475980a34f35dc815a2) in parent group-v587342. [ 748.798221] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Creating folder: Instances. Parent ref: group-v587447. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 748.799108] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-772f142d-56b8-4201-801b-281ff3b99212 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.812782] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Created folder: Instances in parent group-v587447. 
[ 748.813078] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 748.813303] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 748.813596] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bca0dda3-2466-4788-a9bd-d2d177e60f8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.836456] env[69994]: DEBUG nova.network.neutron [req-c55c4068-ca3d-46f0-b36a-1e58110c63d3 req-cb635b8e-8eb1-42ae-8434-1e79b60442bb service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Updated VIF entry in instance network info cache for port 8b639504-b3a0-4772-9a06-af40fbe1667e. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 748.836713] env[69994]: DEBUG nova.network.neutron [req-c55c4068-ca3d-46f0-b36a-1e58110c63d3 req-cb635b8e-8eb1-42ae-8434-1e79b60442bb service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Updating instance_info_cache with network_info: [{"id": "8b639504-b3a0-4772-9a06-af40fbe1667e", "address": "fa:16:3e:d3:a3:94", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b639504-b3", "ovs_interfaceid": "8b639504-b3a0-4772-9a06-af40fbe1667e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.848879] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 748.848879] env[69994]: value = "task-2925297" [ 748.848879] env[69994]: _type = "Task" [ 748.848879] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.860667] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925297, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.875953] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925294, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.042026] env[69994]: DEBUG nova.network.neutron [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Updated VIF entry in instance network info cache for port 77322171-a52e-49ee-a04c-5aecaebff021. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 749.042420] env[69994]: DEBUG nova.network.neutron [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Updating instance_info_cache with network_info: [{"id": "77322171-a52e-49ee-a04c-5aecaebff021", "address": "fa:16:3e:4c:a0:a8", "network": {"id": "4a44e17e-7aac-4a89-abb5-0038f92e41fb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-226468376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fe28fa02f104475980a34f35dc815a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77322171-a5", "ovs_interfaceid": "77322171-a52e-49ee-a04c-5aecaebff021", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.175413] env[69994]: DEBUG nova.compute.manager [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 749.202728] env[69994]: DEBUG nova.virt.hardware [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 749.203019] env[69994]: DEBUG nova.virt.hardware [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.203186] env[69994]: DEBUG nova.virt.hardware [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 749.203366] env[69994]: DEBUG nova.virt.hardware [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.203513] env[69994]: DEBUG nova.virt.hardware [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 749.203659] env[69994]: DEBUG nova.virt.hardware [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 749.203897] env[69994]: DEBUG nova.virt.hardware [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 749.204046] env[69994]: DEBUG nova.virt.hardware [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
749.204522] env[69994]: DEBUG nova.virt.hardware [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 749.204522] env[69994]: DEBUG nova.virt.hardware [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 749.204645] env[69994]: DEBUG nova.virt.hardware [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 749.205494] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2964431c-0d1a-4cdb-bd5b-6bc02534d244 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.208953] env[69994]: DEBUG nova.scheduler.client.report [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 749.218086] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce4f247-17b7-4db3-8750-b7c280d01857 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.342748] env[69994]: DEBUG oslo_concurrency.lockutils [req-c55c4068-ca3d-46f0-b36a-1e58110c63d3 req-cb635b8e-8eb1-42ae-8434-1e79b60442bb service nova] Releasing lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.360701] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925297, 'name': CreateVM_Task, 'duration_secs': 0.423214} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.360940] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 749.361643] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.361942] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.362201] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 749.362476] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15046e7f-5ee6-4362-9679-936884192a51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.369848] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 749.369848] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529c01dd-d9ab-9072-a7ef-30c50edef9fa" [ 749.369848] env[69994]: _type = "Task" [ 749.369848] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.376531] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925294, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52493} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.377107] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] ab320e59-febb-4f8f-9bc4-74227d29c752/ab320e59-febb-4f8f-9bc4-74227d29c752.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 749.377320] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 749.377649] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9ffa0fdb-dd9c-4ea8-af5c-9f961f5ae082 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.382859] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529c01dd-d9ab-9072-a7ef-30c50edef9fa, 'name': SearchDatastore_Task, 'duration_secs': 0.012176} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.384082] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.384319] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.384553] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.384704] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.384902] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 749.385166] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-adfb0dfa-b8ec-43e0-bbe4-835029ce338d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.388849] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 749.388849] env[69994]: value = "task-2925298" [ 749.388849] env[69994]: _type = "Task" [ 749.388849] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.394031] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 749.394208] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 749.397848] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fd432ec-6e84-4730-8aa4-975caed158fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.399790] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925298, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.402927] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 749.402927] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52773068-c993-e383-2b8a-12564d1e5b3f" [ 749.402927] env[69994]: _type = "Task" [ 749.402927] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.412122] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52773068-c993-e383-2b8a-12564d1e5b3f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.545748] env[69994]: DEBUG oslo_concurrency.lockutils [req-ca18d517-5d67-4b0e-9b68-4b0739ae7506 req-322705ad-35cc-41f3-b968-a820a5799804 service nova] Releasing lock "refresh_cache-a589ddb9-947b-4ff4-94f6-1fab4bdb874b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.671415] env[69994]: DEBUG nova.network.neutron [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Successfully updated port: ef5413fb-71f6-42a8-a79c-ae48503f6015 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 749.713748] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.597s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.714378] env[69994]: DEBUG nova.compute.manager [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 749.716940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.431s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.717225] env[69994]: DEBUG nova.objects.instance [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lazy-loading 'resources' on Instance uuid e87e1839-9fef-462d-b1ab-842ef76828a4 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 749.901330] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925298, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084166} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.901610] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 749.902408] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9f506d-c4da-4ee5-82e7-09dd13e9e3f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.913972] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52773068-c993-e383-2b8a-12564d1e5b3f, 'name': SearchDatastore_Task, 'duration_secs': 0.011169} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.933150] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] ab320e59-febb-4f8f-9bc4-74227d29c752/ab320e59-febb-4f8f-9bc4-74227d29c752.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 749.933439] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f59095a-2c4b-468f-9d5e-48d0f6fcb085 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.935755] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd0d3f20-7d66-4c2d-bf1c-84ec8781c3dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.954146] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 749.954146] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a95e2-d89e-b2c8-dbba-c199f8df6c79" [ 749.954146] env[69994]: _type = "Task" [ 749.954146] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.958177] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 749.958177] env[69994]: value = "task-2925299" [ 749.958177] env[69994]: _type = "Task" [ 749.958177] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.964252] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a95e2-d89e-b2c8-dbba-c199f8df6c79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.969129] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925299, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.011872] env[69994]: DEBUG nova.compute.manager [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Received event network-vif-plugged-ef5413fb-71f6-42a8-a79c-ae48503f6015 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 750.012042] env[69994]: DEBUG oslo_concurrency.lockutils [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] Acquiring lock "e0764e41-0810-45a1-8917-ac901f0f8321-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.012267] env[69994]: DEBUG oslo_concurrency.lockutils [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] Lock "e0764e41-0810-45a1-8917-ac901f0f8321-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.012432] env[69994]: DEBUG oslo_concurrency.lockutils [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] Lock "e0764e41-0810-45a1-8917-ac901f0f8321-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.012599] env[69994]: DEBUG nova.compute.manager [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] No waiting events found dispatching network-vif-plugged-ef5413fb-71f6-42a8-a79c-ae48503f6015 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 750.012789] env[69994]: WARNING nova.compute.manager [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Received unexpected event network-vif-plugged-ef5413fb-71f6-42a8-a79c-ae48503f6015 for instance with vm_state building and task_state spawning. 
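The repeated "Waiting for the task: ... to complete" and "Task: {'id': ..., 'name': ...} progress is N%" records above come from oslo.vmware's task polling (wait_for_task / _poll_task), which re-reads the vCenter task state on a fixed interval until it reaches a terminal state. A minimal Python sketch of that control flow, with get_task_info standing in as a hypothetical callback for the SOAP property read (this mirrors only the polling shape seen in the log, not the library's actual implementation):

    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter task until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()              # e.g. {'state': 'running', 'progress': 10}
            if info['state'] == 'success':
                return info.get('result')       # corresponds to "completed successfully"
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(poll_interval)           # each iteration produces one "progress is N%" line
        raise TimeoutError('task did not complete within %.0fs' % timeout)

In the log, the same loop is what turns a CopyVirtualDisk_Task or ReconfigVM_Task handle into the "duration_secs" figure reported once the task finishes.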
[ 750.012973] env[69994]: DEBUG nova.compute.manager [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Received event network-changed-ef5413fb-71f6-42a8-a79c-ae48503f6015 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 750.013143] env[69994]: DEBUG nova.compute.manager [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Refreshing instance network info cache due to event network-changed-ef5413fb-71f6-42a8-a79c-ae48503f6015. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 750.013326] env[69994]: DEBUG oslo_concurrency.lockutils [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] Acquiring lock "refresh_cache-e0764e41-0810-45a1-8917-ac901f0f8321" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.013461] env[69994]: DEBUG oslo_concurrency.lockutils [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] Acquired lock "refresh_cache-e0764e41-0810-45a1-8917-ac901f0f8321" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.013612] env[69994]: DEBUG nova.network.neutron [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Refreshing network info cache for port ef5413fb-71f6-42a8-a79c-ae48503f6015 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 750.173603] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "refresh_cache-e0764e41-0810-45a1-8917-ac901f0f8321" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.223058] env[69994]: DEBUG nova.compute.utils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 750.227827] env[69994]: DEBUG nova.compute.manager [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 750.228127] env[69994]: DEBUG nova.network.neutron [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 750.271143] env[69994]: DEBUG nova.policy [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46492cc97ceb4e6cb14697b3bd6493d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fe28fa02f104475980a34f35dc815a2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 750.466307] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a95e2-d89e-b2c8-dbba-c199f8df6c79, 'name': SearchDatastore_Task, 'duration_secs': 0.016051} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.469158] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.469428] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] a589ddb9-947b-4ff4-94f6-1fab4bdb874b/a589ddb9-947b-4ff4-94f6-1fab4bdb874b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 750.469874] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5589bf24-6084-41bb-bdbb-1ed15cb1c528 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.475106] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925299, 'name': ReconfigVM_Task, 'duration_secs': 0.458838} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.475336] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Reconfigured VM instance instance-00000020 to attach disk [datastore1] ab320e59-febb-4f8f-9bc4-74227d29c752/ab320e59-febb-4f8f-9bc4-74227d29c752.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 750.475983] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a84c24c-0ff7-4fea-ae99-89119d9e2129 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.482541] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 750.482541] env[69994]: value = "task-2925300" [ 750.482541] env[69994]: _type = "Task" [ 750.482541] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.488200] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 750.488200] env[69994]: value = "task-2925301" [ 750.488200] env[69994]: _type = "Task" [ 750.488200] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.494662] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925300, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.503159] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925301, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.560871] env[69994]: DEBUG nova.network.neutron [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.572100] env[69994]: DEBUG nova.network.neutron [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Successfully created port: 92b4ab11-10bb-4f7a-8820-908a4fb30d9b {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 750.657576] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d1ba1c-c902-4356-862f-8751c5eff761 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.672182] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5f00d3-ff8d-4594-8b53-c21438fb7468 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.712206] env[69994]: DEBUG nova.network.neutron [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.715426] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de56b12f-a6fe-4242-ba92-5fa442f3b0ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.729011] env[69994]: DEBUG nova.compute.manager [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 750.733504] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa8bc24-1d7e-444a-b4a4-dd203f08df70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.759367] env[69994]: DEBUG nova.compute.provider_tree [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.995766] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925300, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505027} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.996074] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] a589ddb9-947b-4ff4-94f6-1fab4bdb874b/a589ddb9-947b-4ff4-94f6-1fab4bdb874b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 750.996321] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 750.996575] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc2a0d73-6ad9-4803-a49b-ced8157ca84f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.001442] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925301, 'name': Rename_Task, 'duration_secs': 0.164499} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.002010] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 751.002248] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab69baa6-ce29-4d33-a5eb-cd849c815bbe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.006902] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 751.006902] env[69994]: value = "task-2925302" [ 751.006902] env[69994]: _type = "Task" [ 751.006902] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.011034] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 751.011034] env[69994]: value = "task-2925303" [ 751.011034] env[69994]: _type = "Task" [ 751.011034] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.027368] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925302, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.027650] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925303, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.219675] env[69994]: DEBUG oslo_concurrency.lockutils [req-da9e5ead-4feb-4eb5-aa39-d254dcbc0b24 req-73d14390-bf41-4094-8835-768a0a3dd02e service nova] Releasing lock "refresh_cache-e0764e41-0810-45a1-8917-ac901f0f8321" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.220091] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquired lock "refresh_cache-e0764e41-0810-45a1-8917-ac901f0f8321" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.220260] env[69994]: DEBUG nova.network.neutron [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 751.267161] env[69994]: DEBUG nova.scheduler.client.report [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.521667] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925302, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090172} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.524379] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 751.524662] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925303, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.525384] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5f5ff3-7da9-4b19-a480-b80804872d6e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.546850] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] a589ddb9-947b-4ff4-94f6-1fab4bdb874b/a589ddb9-947b-4ff4-94f6-1fab4bdb874b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 751.547421] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efba21fd-87ca-4661-ab29-d25be5a467b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.567279] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 751.567279] env[69994]: value = "task-2925304" [ 751.567279] env[69994]: _type = "Task" [ 751.567279] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.575399] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925304, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.748608] env[69994]: DEBUG nova.compute.manager [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 751.767965] env[69994]: DEBUG nova.network.neutron [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.770759] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.054s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.774326] env[69994]: DEBUG nova.virt.hardware [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 751.774548] env[69994]: DEBUG nova.virt.hardware [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 751.774701] env[69994]: DEBUG nova.virt.hardware [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 751.775031] env[69994]: DEBUG nova.virt.hardware [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 751.775069] env[69994]: DEBUG nova.virt.hardware [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 751.775202] env[69994]: DEBUG nova.virt.hardware [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 751.775409] env[69994]: DEBUG nova.virt.hardware [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 751.775566] env[69994]: DEBUG nova.virt.hardware [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 751.775726] env[69994]: DEBUG nova.virt.hardware [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 751.776010] env[69994]: DEBUG nova.virt.hardware [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 751.776215] env[69994]: DEBUG nova.virt.hardware [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 751.776706] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.744s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.776915] env[69994]: DEBUG nova.objects.instance [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lazy-loading 'resources' on Instance uuid db9f7abd-ab45-49a3-9035-695b26756142 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 751.778896] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ab00a5-8ae1-4a86-a929-e41db2885988 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.788767] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6071f3e-33ce-45d4-ad21-c7f642d0a5f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.805764] env[69994]: INFO nova.scheduler.client.report [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Deleted allocations for instance e87e1839-9fef-462d-b1ab-842ef76828a4 [ 752.004468] env[69994]: DEBUG nova.network.neutron [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Updating instance_info_cache with network_info: [{"id": "ef5413fb-71f6-42a8-a79c-ae48503f6015", "address": "fa:16:3e:9e:1e:81", "network": {"id": 
"4a44e17e-7aac-4a89-abb5-0038f92e41fb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-226468376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fe28fa02f104475980a34f35dc815a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef5413fb-71", "ovs_interfaceid": "ef5413fb-71f6-42a8-a79c-ae48503f6015", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.028642] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925303, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.079094] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925304, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.224309] env[69994]: DEBUG nova.compute.manager [req-75b3914a-9cb6-4028-ac97-8325e069487f req-eae5fd31-eacb-4e72-9d63-0b0071bf40d6 service nova] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Received event network-vif-plugged-92b4ab11-10bb-4f7a-8820-908a4fb30d9b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 752.224309] env[69994]: DEBUG oslo_concurrency.lockutils [req-75b3914a-9cb6-4028-ac97-8325e069487f req-eae5fd31-eacb-4e72-9d63-0b0071bf40d6 service nova] Acquiring lock "180b4236-289c-4818-885d-c66e9e9a2ea8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.224309] env[69994]: DEBUG oslo_concurrency.lockutils [req-75b3914a-9cb6-4028-ac97-8325e069487f req-eae5fd31-eacb-4e72-9d63-0b0071bf40d6 service nova] Lock "180b4236-289c-4818-885d-c66e9e9a2ea8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.225072] env[69994]: DEBUG oslo_concurrency.lockutils [req-75b3914a-9cb6-4028-ac97-8325e069487f req-eae5fd31-eacb-4e72-9d63-0b0071bf40d6 service nova] Lock "180b4236-289c-4818-885d-c66e9e9a2ea8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.225467] env[69994]: DEBUG nova.compute.manager [req-75b3914a-9cb6-4028-ac97-8325e069487f req-eae5fd31-eacb-4e72-9d63-0b0071bf40d6 service nova] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] No waiting events found dispatching network-vif-plugged-92b4ab11-10bb-4f7a-8820-908a4fb30d9b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 752.226192] env[69994]: WARNING nova.compute.manager [req-75b3914a-9cb6-4028-ac97-8325e069487f req-eae5fd31-eacb-4e72-9d63-0b0071bf40d6 service nova] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Received unexpected event network-vif-plugged-92b4ab11-10bb-4f7a-8820-908a4fb30d9b for instance with vm_state building and task_state spawning. 
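The "Received event network-vif-plugged-...", "No waiting events found dispatching ..." and "Received unexpected event ..." records above show the compute manager's external-event dispatch: a waiter is registered per (instance, event) before the operation that will trigger it, and pop_instance_event either signals that waiter or logs a warning when none was registered (as happens here, because the VIF plug raced ahead of the spawn path). A simplified sketch of that pattern, using threading primitives instead of Nova's eventlet ones; the class and method names only echo those in the log, and the bodies are illustrative:

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Register-then-pop dispatch for externally delivered instance events."""

        def __init__(self):
            self._lock = threading.Lock()        # plays the role of the "<uuid>-events" lock
            self._waiters = defaultdict(dict)    # instance uuid -> {event name: threading.Event}

        def prepare_for_instance_event(self, instance_uuid, event_name):
            # Called before starting the operation that will trigger the event.
            waiter = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            # Called when the external event (e.g. from Neutron) arrives.
            with self._lock:
                waiter = self._waiters[instance_uuid].pop(event_name, None)
            if waiter is None:
                print('WARNING: received unexpected event %s for instance %s'
                      % (event_name, instance_uuid))
                return False
            waiter.set()                         # unblocks whoever is waiting on this event
            return True

    # Example with the identifiers from the log records above:
    events = InstanceEvents()
    w = events.prepare_for_instance_event('e0764e41-0810-45a1-8917-ac901f0f8321',
                                          'network-vif-plugged-ef5413fb-71f6-42a8-a79c-ae48503f6015')
    events.pop_instance_event('e0764e41-0810-45a1-8917-ac901f0f8321',
                              'network-vif-plugged-ef5413fb-71f6-42a8-a79c-ae48503f6015')
    assert w.is_set()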
[ 752.296976] env[69994]: DEBUG nova.network.neutron [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Successfully updated port: 92b4ab11-10bb-4f7a-8820-908a4fb30d9b {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 752.313763] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb4cec86-ab80-4617-99f6-593133410b8d tempest-AttachInterfacesV270Test-1818994759 tempest-AttachInterfacesV270Test-1818994759-project-member] Lock "e87e1839-9fef-462d-b1ab-842ef76828a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.624s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.508326] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Releasing lock "refresh_cache-e0764e41-0810-45a1-8917-ac901f0f8321" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.508674] env[69994]: DEBUG nova.compute.manager [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Instance network_info: |[{"id": "ef5413fb-71f6-42a8-a79c-ae48503f6015", "address": "fa:16:3e:9e:1e:81", "network": {"id": "4a44e17e-7aac-4a89-abb5-0038f92e41fb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-226468376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fe28fa02f104475980a34f35dc815a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef5413fb-71", "ovs_interfaceid": "ef5413fb-71f6-42a8-a79c-ae48503f6015", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 752.509118] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:1e:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f267bcdd-0daa-4337-9709-5fc060c267d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef5413fb-71f6-42a8-a79c-ae48503f6015', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 752.516774] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 752.517059] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 752.522224] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe9ec199-703a-48fe-9917-a1b39c01714f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.542174] env[69994]: DEBUG oslo_vmware.api [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925303, 'name': PowerOnVM_Task, 'duration_secs': 1.31335} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.543457] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 752.543674] env[69994]: INFO nova.compute.manager [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Took 8.65 seconds to spawn the instance on the hypervisor. [ 752.543925] env[69994]: DEBUG nova.compute.manager [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 752.544216] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 752.544216] env[69994]: value = "task-2925305" [ 752.544216] env[69994]: _type = "Task" [ 752.544216] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.545348] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49700fff-9ce7-4212-b745-33f9844c8ef0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.561138] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925305, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.577987] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925304, 'name': ReconfigVM_Task, 'duration_secs': 0.843156} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.577987] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Reconfigured VM instance instance-00000021 to attach disk [datastore1] a589ddb9-947b-4ff4-94f6-1fab4bdb874b/a589ddb9-947b-4ff4-94f6-1fab4bdb874b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.580928] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29fda048-0244-4291-8fd8-8c91e2bee946 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.588228] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 752.588228] env[69994]: value = "task-2925306" [ 752.588228] env[69994]: _type = "Task" [ 752.588228] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.598347] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925306, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.803400] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "refresh_cache-180b4236-289c-4818-885d-c66e9e9a2ea8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.803560] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquired lock "refresh_cache-180b4236-289c-4818-885d-c66e9e9a2ea8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.803710] env[69994]: DEBUG nova.network.neutron [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 752.842043] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63615abe-887b-49d6-8c08-624c3740af74 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.849564] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e93b8f-a0ef-48bb-81e1-04d725fc0cdf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.883831] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1e4d4984-badc-4bd2-a4af-31927303091a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.893170] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dff6792-5824-436b-b37f-97181cffedf7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.909295] env[69994]: DEBUG nova.compute.provider_tree [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.059160] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925305, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.070058] env[69994]: INFO nova.compute.manager [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Took 40.40 seconds to build instance. [ 753.099744] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925306, 'name': Rename_Task, 'duration_secs': 0.212593} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.100285] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 753.100668] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-146f99c2-7925-40cc-9315-37c8565c0a23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.108104] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 753.108104] env[69994]: value = "task-2925307" [ 753.108104] env[69994]: _type = "Task" [ 753.108104] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.118840] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925307, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.348178] env[69994]: DEBUG nova.network.neutron [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 753.413815] env[69994]: DEBUG nova.scheduler.client.report [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 753.538647] env[69994]: DEBUG nova.network.neutron [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Updating instance_info_cache with network_info: [{"id": "92b4ab11-10bb-4f7a-8820-908a4fb30d9b", "address": "fa:16:3e:23:62:56", "network": {"id": "4a44e17e-7aac-4a89-abb5-0038f92e41fb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-226468376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fe28fa02f104475980a34f35dc815a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b4ab11-10", "ovs_interfaceid": "92b4ab11-10bb-4f7a-8820-908a4fb30d9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.566639] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925305, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.576992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-840c663a-3516-4b7c-af6a-9415c44af21c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.624s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.620796] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925307, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.920014] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.143s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.922652] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.889s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.923099] env[69994]: DEBUG nova.objects.instance [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lazy-loading 'resources' on Instance uuid c512ee01-7d45-49f0-b2ce-659392527264 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 753.944707] env[69994]: INFO nova.scheduler.client.report [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Deleted allocations for instance db9f7abd-ab45-49a3-9035-695b26756142 [ 754.043913] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Releasing lock "refresh_cache-180b4236-289c-4818-885d-c66e9e9a2ea8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.044286] env[69994]: DEBUG nova.compute.manager [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Instance network_info: |[{"id": "92b4ab11-10bb-4f7a-8820-908a4fb30d9b", "address": "fa:16:3e:23:62:56", "network": {"id": "4a44e17e-7aac-4a89-abb5-0038f92e41fb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-226468376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fe28fa02f104475980a34f35dc815a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b4ab11-10", "ovs_interfaceid": "92b4ab11-10bb-4f7a-8820-908a4fb30d9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 754.044701] env[69994]: DEBUG 
nova.virt.vmwareapi.vmops [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:62:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f267bcdd-0daa-4337-9709-5fc060c267d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92b4ab11-10bb-4f7a-8820-908a4fb30d9b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 754.055348] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 754.055665] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 754.059796] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed8d82f7-4bf7-407a-b283-0054b30b1b0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.080236] env[69994]: DEBUG nova.compute.manager [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 754.088633] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925305, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.089611] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 754.089611] env[69994]: value = "task-2925308" [ 754.089611] env[69994]: _type = "Task" [ 754.089611] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.098446] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925308, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.101383] env[69994]: DEBUG nova.compute.manager [req-6b071624-26f2-49a9-97f7-9cc78a7046e4 req-9c8cdb64-bc45-4095-95dd-a4be4b688fdf service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Received event network-changed-da926370-b1f8-440c-a006-0135408e8d6f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 754.101572] env[69994]: DEBUG nova.compute.manager [req-6b071624-26f2-49a9-97f7-9cc78a7046e4 req-9c8cdb64-bc45-4095-95dd-a4be4b688fdf service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Refreshing instance network info cache due to event network-changed-da926370-b1f8-440c-a006-0135408e8d6f. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 754.101791] env[69994]: DEBUG oslo_concurrency.lockutils [req-6b071624-26f2-49a9-97f7-9cc78a7046e4 req-9c8cdb64-bc45-4095-95dd-a4be4b688fdf service nova] Acquiring lock "refresh_cache-ab320e59-febb-4f8f-9bc4-74227d29c752" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.101938] env[69994]: DEBUG oslo_concurrency.lockutils [req-6b071624-26f2-49a9-97f7-9cc78a7046e4 req-9c8cdb64-bc45-4095-95dd-a4be4b688fdf service nova] Acquired lock "refresh_cache-ab320e59-febb-4f8f-9bc4-74227d29c752" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.102116] env[69994]: DEBUG nova.network.neutron [req-6b071624-26f2-49a9-97f7-9cc78a7046e4 req-9c8cdb64-bc45-4095-95dd-a4be4b688fdf service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Refreshing network info cache for port da926370-b1f8-440c-a006-0135408e8d6f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 754.124277] env[69994]: DEBUG oslo_vmware.api [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925307, 'name': PowerOnVM_Task, 'duration_secs': 0.602869} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.124277] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 754.124277] env[69994]: INFO nova.compute.manager [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Took 7.52 seconds to spawn the instance on the hypervisor. [ 754.124277] env[69994]: DEBUG nova.compute.manager [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 754.125299] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8621af-f22f-46b4-b20c-c6fab0673fe4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.269018] env[69994]: DEBUG nova.compute.manager [req-b09b060a-2f5a-431e-8295-fc05f6ef5a03 req-2c1ddf61-16d1-4315-9a97-fd5f9fdc0981 service nova] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Received event network-changed-92b4ab11-10bb-4f7a-8820-908a4fb30d9b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 754.269018] env[69994]: DEBUG nova.compute.manager [req-b09b060a-2f5a-431e-8295-fc05f6ef5a03 req-2c1ddf61-16d1-4315-9a97-fd5f9fdc0981 service nova] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Refreshing instance network info cache due to event network-changed-92b4ab11-10bb-4f7a-8820-908a4fb30d9b. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 754.269223] env[69994]: DEBUG oslo_concurrency.lockutils [req-b09b060a-2f5a-431e-8295-fc05f6ef5a03 req-2c1ddf61-16d1-4315-9a97-fd5f9fdc0981 service nova] Acquiring lock "refresh_cache-180b4236-289c-4818-885d-c66e9e9a2ea8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.269578] env[69994]: DEBUG oslo_concurrency.lockutils [req-b09b060a-2f5a-431e-8295-fc05f6ef5a03 req-2c1ddf61-16d1-4315-9a97-fd5f9fdc0981 service nova] Acquired lock "refresh_cache-180b4236-289c-4818-885d-c66e9e9a2ea8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.269834] env[69994]: DEBUG nova.network.neutron [req-b09b060a-2f5a-431e-8295-fc05f6ef5a03 req-2c1ddf61-16d1-4315-9a97-fd5f9fdc0981 service nova] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Refreshing network info cache for port 92b4ab11-10bb-4f7a-8820-908a4fb30d9b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 754.457845] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a7aae669-5603-4dba-9001-ebd12a0e6ed2 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "db9f7abd-ab45-49a3-9035-695b26756142" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.973s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.566253] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925305, 'name': CreateVM_Task, 'duration_secs': 1.560546} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.568673] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 754.569726] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.570042] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.570271] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 754.570603] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc6a4f10-f49a-4edb-84f7-92fb8aae2d45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 754.576496] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 754.576496] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52716aa0-3552-3aab-14bf-4071476328d5" [ 754.576496] env[69994]: _type = "Task" [ 754.576496] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.588945] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52716aa0-3552-3aab-14bf-4071476328d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.602122] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925308, 'name': CreateVM_Task, 'duration_secs': 0.472722} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.602814] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 754.602954] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.603141] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.603441] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 754.605984] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92bfec16-5963-4aa5-a003-9a2959543a5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.609922] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.618042] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 
tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 754.618042] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ede700-8f64-4d2a-a31a-7c1b0f4abf88" [ 754.618042] env[69994]: _type = "Task" [ 754.618042] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.643805] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ede700-8f64-4d2a-a31a-7c1b0f4abf88, 'name': SearchDatastore_Task, 'duration_secs': 0.011893} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.645773] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.647764] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 754.647764] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.647764] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.647764] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 754.650128] env[69994]: INFO nova.compute.manager [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Took 40.35 seconds to build instance. 
[ 754.650460] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc93f84b-ca9c-4720-bbfd-4a2015a9df6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.663609] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 754.663821] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 754.665914] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e798d741-76b8-4bff-bd65-03d75ca541c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.677207] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 754.677207] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e63b22-f92b-7e09-8b17-8036dbcc18b7" [ 754.677207] env[69994]: _type = "Task" [ 754.677207] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.688533] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e63b22-f92b-7e09-8b17-8036dbcc18b7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.900727] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea20878-3944-4b4f-9ad1-5e65a2cc40bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.909909] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7291288-98a8-4979-82ef-cf694effa94a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.950008] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd80e87-ca46-4a8d-88c3-50eeb59fb166 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.958567] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78ed7f4-6ab0-4392-8a3e-b7f28a089e85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.975127] env[69994]: DEBUG nova.compute.provider_tree [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.977154] env[69994]: DEBUG nova.network.neutron [req-6b071624-26f2-49a9-97f7-9cc78a7046e4 req-9c8cdb64-bc45-4095-95dd-a4be4b688fdf service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Updated VIF entry in instance network info cache for port da926370-b1f8-440c-a006-0135408e8d6f. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 754.977508] env[69994]: DEBUG nova.network.neutron [req-6b071624-26f2-49a9-97f7-9cc78a7046e4 req-9c8cdb64-bc45-4095-95dd-a4be4b688fdf service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Updating instance_info_cache with network_info: [{"id": "da926370-b1f8-440c-a006-0135408e8d6f", "address": "fa:16:3e:bf:e4:7d", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda926370-b1", "ovs_interfaceid": "da926370-b1f8-440c-a006-0135408e8d6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.068348] env[69994]: DEBUG nova.network.neutron [req-b09b060a-2f5a-431e-8295-fc05f6ef5a03 req-2c1ddf61-16d1-4315-9a97-fd5f9fdc0981 service nova] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Updated VIF entry in instance network info cache for port 92b4ab11-10bb-4f7a-8820-908a4fb30d9b. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 755.068762] env[69994]: DEBUG nova.network.neutron [req-b09b060a-2f5a-431e-8295-fc05f6ef5a03 req-2c1ddf61-16d1-4315-9a97-fd5f9fdc0981 service nova] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Updating instance_info_cache with network_info: [{"id": "92b4ab11-10bb-4f7a-8820-908a4fb30d9b", "address": "fa:16:3e:23:62:56", "network": {"id": "4a44e17e-7aac-4a89-abb5-0038f92e41fb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-226468376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fe28fa02f104475980a34f35dc815a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b4ab11-10", "ovs_interfaceid": "92b4ab11-10bb-4f7a-8820-908a4fb30d9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.087113] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52716aa0-3552-3aab-14bf-4071476328d5, 'name': SearchDatastore_Task, 'duration_secs': 0.012199} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.087412] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.087633] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 755.087859] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.088072] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 755.088270] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 755.088522] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea5b3a41-e5e6-4011-945e-56718ee841fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.097812] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 755.097994] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 755.098742] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1df5b726-2508-4a2f-84f9-231add65fe3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.105158] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 755.105158] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5242324a-f95d-3e16-db35-232b8e5c1183" [ 755.105158] env[69994]: _type = "Task" [ 755.105158] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.114016] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5242324a-f95d-3e16-db35-232b8e5c1183, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.155827] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b7615b1-55d1-4c8b-8b85-e3aacaaca9a1 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.273s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.187897] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e63b22-f92b-7e09-8b17-8036dbcc18b7, 'name': SearchDatastore_Task, 'duration_secs': 0.011473} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.188901] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3818f27-8b39-4417-b194-a5ad651d8ce5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.195416] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 755.195416] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5220df76-b2e8-d403-afcd-61d7b1417b89" [ 755.195416] env[69994]: _type = "Task" [ 755.195416] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.203994] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5220df76-b2e8-d403-afcd-61d7b1417b89, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.480668] env[69994]: DEBUG nova.scheduler.client.report [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 755.485334] env[69994]: DEBUG oslo_concurrency.lockutils [req-6b071624-26f2-49a9-97f7-9cc78a7046e4 req-9c8cdb64-bc45-4095-95dd-a4be4b688fdf service nova] Releasing lock "refresh_cache-ab320e59-febb-4f8f-9bc4-74227d29c752" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.570967] env[69994]: DEBUG oslo_concurrency.lockutils [req-b09b060a-2f5a-431e-8295-fc05f6ef5a03 req-2c1ddf61-16d1-4315-9a97-fd5f9fdc0981 service nova] Releasing lock "refresh_cache-180b4236-289c-4818-885d-c66e9e9a2ea8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.619737] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5242324a-f95d-3e16-db35-232b8e5c1183, 'name': SearchDatastore_Task, 'duration_secs': 0.010994} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.620671] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5ddc517-893d-4a3d-b1ba-5b4f0db88f42 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.626403] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 755.626403] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f62fe8-d224-9be9-3d27-a1dd191bc652" [ 755.626403] env[69994]: _type = "Task" [ 755.626403] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.635174] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f62fe8-d224-9be9-3d27-a1dd191bc652, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.658812] env[69994]: DEBUG nova.compute.manager [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 755.707060] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5220df76-b2e8-d403-afcd-61d7b1417b89, 'name': SearchDatastore_Task, 'duration_secs': 0.012733} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.707606] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.707866] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 180b4236-289c-4818-885d-c66e9e9a2ea8/180b4236-289c-4818-885d-c66e9e9a2ea8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 755.708234] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52b42b7d-1046-4b92-bc5f-28ca7ca3ced4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.716536] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 755.716536] env[69994]: value = "task-2925309" [ 755.716536] env[69994]: _type = "Task" [ 755.716536] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.725097] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925309, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.986508] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.064s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.989154] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.873s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.990745] env[69994]: INFO nova.compute.claims [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 756.012860] env[69994]: INFO nova.scheduler.client.report [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Deleted allocations for instance c512ee01-7d45-49f0-b2ce-659392527264 [ 756.138914] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f62fe8-d224-9be9-3d27-a1dd191bc652, 'name': SearchDatastore_Task, 'duration_secs': 0.013389} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.139239] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.139561] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e0764e41-0810-45a1-8917-ac901f0f8321/e0764e41-0810-45a1-8917-ac901f0f8321.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 756.139842] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7e253e0-83b4-4afb-a41c-6df39f0296ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.148883] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 756.148883] env[69994]: value = "task-2925310" [ 756.148883] env[69994]: _type = "Task" [ 756.148883] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.158867] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925310, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.182899] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.228354] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496811} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.228606] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 180b4236-289c-4818-885d-c66e9e9a2ea8/180b4236-289c-4818-885d-c66e9e9a2ea8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 756.228824] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 756.229127] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f261504-2794-4d1a-bb7f-29c7068b2d8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.237239] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 756.237239] env[69994]: value = "task-2925311" [ 756.237239] env[69994]: _type = "Task" [ 756.237239] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.248512] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925311, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.521106] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aeed5118-5ed8-4b9d-ac9f-0df8d58e1317 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "c512ee01-7d45-49f0-b2ce-659392527264" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.737s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.660869] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925310, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510099} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.661185] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e0764e41-0810-45a1-8917-ac901f0f8321/e0764e41-0810-45a1-8917-ac901f0f8321.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 756.661407] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 756.661714] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e6b2ad1-b47b-479e-af4f-2065d5c11ff3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.671028] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 756.671028] env[69994]: value = "task-2925312" [ 756.671028] env[69994]: _type = "Task" [ 756.671028] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.679832] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925312, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.748162] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925311, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069579} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.748586] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 756.749292] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88151a2c-87ef-481e-8702-5880f847f1e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.773555] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] 180b4236-289c-4818-885d-c66e9e9a2ea8/180b4236-289c-4818-885d-c66e9e9a2ea8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 756.773897] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4aa17c7e-af14-42e2-abdd-82ab6b835af7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.794710] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 756.794710] env[69994]: value = "task-2925313" [ 756.794710] env[69994]: _type = "Task" [ 756.794710] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.803709] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925313, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.182785] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925312, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07056} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.182785] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 757.184094] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c5f651-9e2b-4c10-914e-72ff0747f7f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.211069] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] e0764e41-0810-45a1-8917-ac901f0f8321/e0764e41-0810-45a1-8917-ac901f0f8321.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 757.213654] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-387a1eaa-a4f2-4841-840c-1996e0e8a38c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.235064] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 757.235064] env[69994]: value = "task-2925314" [ 757.235064] env[69994]: _type = "Task" [ 757.235064] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.245602] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925314, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.311353] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925313, 'name': ReconfigVM_Task, 'duration_secs': 0.306486} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.311710] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Reconfigured VM instance instance-00000023 to attach disk [datastore2] 180b4236-289c-4818-885d-c66e9e9a2ea8/180b4236-289c-4818-885d-c66e9e9a2ea8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 757.316193] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-508daf88-7b6a-4005-a619-8d3084a3e525 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.325135] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 757.325135] env[69994]: value = "task-2925315" [ 757.325135] env[69994]: _type = "Task" [ 757.325135] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.336533] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925315, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.431587] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2274edd-0de4-47ee-8d58-3e4dc20f990d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.440679] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8361816f-0dba-4fa9-b6d6-fbc59b74eb87 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.475876] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32148f99-9250-4ba4-a377-d33f28a0759e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.485925] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aaa70c0-a548-4f04-a05a-5cecbf1c0ce2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.502823] env[69994]: DEBUG nova.compute.provider_tree [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.745943] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925314, 'name': ReconfigVM_Task, 'duration_secs': 0.311442} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.746269] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Reconfigured VM instance instance-00000022 to attach disk [datastore1] e0764e41-0810-45a1-8917-ac901f0f8321/e0764e41-0810-45a1-8917-ac901f0f8321.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 757.746890] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9997509c-44e3-4ded-ac82-5f01450dcfe3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.754594] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 757.754594] env[69994]: value = "task-2925316" [ 757.754594] env[69994]: _type = "Task" [ 757.754594] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.764642] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925316, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.836704] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925315, 'name': Rename_Task, 'duration_secs': 0.157886} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.836850] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 757.836985] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6620066a-0eb8-4507-b700-ab5c88ea02b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.844337] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 757.844337] env[69994]: value = "task-2925317" [ 757.844337] env[69994]: _type = "Task" [ 757.844337] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.853573] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925317, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.008575] env[69994]: DEBUG nova.scheduler.client.report [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 758.269121] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925316, 'name': Rename_Task, 'duration_secs': 0.219614} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.269413] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 758.269665] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55d5620e-d3cf-4280-a025-8ad6234979ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.276650] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 758.276650] env[69994]: value = "task-2925318" [ 758.276650] env[69994]: _type = "Task" [ 758.276650] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.295551] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925318, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.356792] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925317, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.514737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.526s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.515402] env[69994]: DEBUG nova.compute.manager [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 758.518298] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.372s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.518914] env[69994]: DEBUG nova.objects.instance [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lazy-loading 'resources' on Instance uuid 367665db-def4-4148-a316-b83378e00ba8 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 758.788234] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925318, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.856439] env[69994]: DEBUG oslo_vmware.api [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925317, 'name': PowerOnVM_Task, 'duration_secs': 0.560767} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.856718] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 758.856919] env[69994]: INFO nova.compute.manager [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Took 7.11 seconds to spawn the instance on the hypervisor. 
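The records above walk through the vmwareapi spawn sequence for these instances: the cached image vmdk is copied next to the instance (CopyVirtualDisk_Task), the root disk is extended (ExtendVirtualDisk_Task), the disk is attached with ReconfigVM_Task, then Rename_Task and PowerOnVM_Task finish the spawn, and every vCenter task is polled until it reports "completed successfully" with a duration_secs. A minimal, self-contained sketch of that poll-until-done loop follows; poll_task, get_task_info and TaskInfo are hypothetical stand-ins for illustration, not the oslo.vmware implementation.

```python
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    """Hypothetical stand-in for what a vCenter-style API reports per task."""
    state: str            # "running", "success" or "error"
    progress: int         # 0..100
    error: str | None = None


def poll_task(get_task_info, task_id, interval=0.5, timeout=300.0):
    """Poll a long-running task until it finishes, mirroring the
    "progress is N%" ... "completed successfully" lines in the log."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)
        if info.state == "success":
            return time.monotonic() - start      # the logged duration_secs
        if info.state == "error":
            raise RuntimeError(f"task {task_id} failed: {info.error}")
        print(f"Task {task_id} progress is {info.progress}%.")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"task {task_id} still running after {timeout}s")
        time.sleep(interval)


# Demo backend that finishes on the third poll.
_polls = {"n": 0}
def fake_get_task_info(task_id):
    _polls["n"] += 1
    if _polls["n"] < 3:
        return TaskInfo(state="running", progress=45 * _polls["n"])
    return TaskInfo(state="success", progress=100)


if __name__ == "__main__":
    secs = poll_task(fake_get_task_info, "task-2925317", interval=0.05)
    print(f"completed successfully in {secs:.3f}s")
```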
[ 758.857117] env[69994]: DEBUG nova.compute.manager [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 758.857916] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af44227-2b6d-47b1-bfbc-14ba0d7d4c18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.022565] env[69994]: DEBUG nova.compute.utils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 759.025722] env[69994]: DEBUG nova.compute.manager [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 759.025722] env[69994]: DEBUG nova.network.neutron [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 759.077583] env[69994]: DEBUG nova.policy [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fec48b3a663741fe9c701ab7e2105b29', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa48a4bba9de4f50b8ed79f61dd5d4fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 759.117168] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "f6408fad-a6b8-4868-a192-3acd065935ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.117416] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "f6408fad-a6b8-4868-a192-3acd065935ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.292642] env[69994]: DEBUG oslo_vmware.api [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 
tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925318, 'name': PowerOnVM_Task, 'duration_secs': 0.597247} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.293020] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 759.293403] env[69994]: INFO nova.compute.manager [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Took 10.12 seconds to spawn the instance on the hypervisor. [ 759.293591] env[69994]: DEBUG nova.compute.manager [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 759.294537] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7eb56d-e081-4d08-8fb2-4ca584dc6a57 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.345583] env[69994]: DEBUG nova.network.neutron [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Successfully created port: 2523e77a-138d-4684-9f91-02c4e2e85f27 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 759.384651] env[69994]: INFO nova.compute.manager [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Took 42.34 seconds to build instance. 
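The timing data for each build is already present in these records: per-task durations appear as 'duration_secs' in the _poll_task lines, and the end-to-end figures appear as "Took N seconds to spawn/build". Below is a small sketch that summarizes those numbers from a nova-compute log like this one; the regexes assume only the literal phrasings visible above.

```python
import re
from collections import defaultdict

# Matches the task-completion records, e.g.
# "Task: {'id': task-2925309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496811} completed successfully."
TASK_RE = re.compile(r"'name': (\w+), 'duration_secs': ([\d.]+)")
# Matches "Took 7.11 seconds to spawn the instance" / "Took 42.34 seconds to build instance".
TOTAL_RE = re.compile(r"Took ([\d.]+) seconds to (spawn|build) (?:the )?instance")


def summarize(lines):
    """Collect per-task durations and spawn/build totals from log lines."""
    durations = defaultdict(list)
    totals = defaultdict(list)
    for line in lines:
        for name, secs in TASK_RE.findall(line):
            durations[name].append(float(secs))
        for secs, phase in TOTAL_RE.findall(line):
            totals[phase].append(float(secs))
    return durations, totals


if __name__ == "__main__":
    sample = [
        "Task: {'id': task-2925311, 'name': ExtendVirtualDisk_Task, "
        "'duration_secs': 0.069579} completed successfully.",
        "Took 42.34 seconds to build instance.",
    ]
    durations, totals = summarize(sample)
    for name, values in durations.items():
        print(f"{name}: max {max(values):.3f}s over {len(values)} task(s)")
    for phase, values in totals.items():
        print(f"{phase}: {values}")
```

Fed the full log instead of the two sample lines, the same function gives a quick view of where the 40-plus-second build times go.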
[ 759.463479] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a9b0f8-ca32-4510-b691-5d719be06a67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.475014] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af925db-390a-454a-abb1-a652d85ac635 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.506638] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ebf1805-04fb-474c-92f5-a462ffa133bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.516077] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0219ad-b842-4aa8-912b-9f5d2dfd0719 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.537088] env[69994]: DEBUG nova.compute.manager [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 759.539253] env[69994]: DEBUG nova.compute.provider_tree [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.818927] env[69994]: INFO nova.compute.manager [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Took 44.33 seconds to build instance. 
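The "Inventory has not changed ... based on inventory data" records above carry the resource provider's inventory as plain dicts. For each resource class the schedulable capacity follows the usual placement formula, (total - reserved) * allocation_ratio, while max_unit caps what a single instance may claim; applied to the payload logged here that gives 192 VCPU, 196078 MB of RAM and 400 GB of disk. A short sketch of that arithmetic, using the dict exactly as logged:

```python
def capacity(inv):
    """Schedulable capacity for one resource class:
    (total - reserved) * allocation_ratio."""
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])


# Inventory data as logged for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # max_unit limits what one consumer (one instance) may allocate at once.
    print(f"{rc}: capacity={capacity(inv)}, per-instance cap={inv['max_unit']}")
# VCPU: capacity=192, per-instance cap=16
# MEMORY_MB: capacity=196078, per-instance cap=65530
# DISK_GB: capacity=400, per-instance cap=158
```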
[ 759.886646] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd7eecc6-eed4-4d9e-9842-2fca4ba99132 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "180b4236-289c-4818-885d-c66e9e9a2ea8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.689s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.044707] env[69994]: DEBUG nova.scheduler.client.report [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 760.321680] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c79ccb3b-7e2c-41b9-9925-421697071242 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "e0764e41-0810-45a1-8917-ac901f0f8321" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.800s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.390200] env[69994]: DEBUG nova.compute.manager [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 760.550280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.032s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.553580] env[69994]: DEBUG nova.compute.manager [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 760.555618] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.199s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.556946] env[69994]: INFO nova.compute.claims [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 760.576369] env[69994]: INFO nova.scheduler.client.report [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Deleted allocations for instance 367665db-def4-4148-a316-b83378e00ba8 [ 760.582600] env[69994]: DEBUG nova.virt.hardware [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 760.582844] env[69994]: DEBUG nova.virt.hardware [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.583062] env[69994]: DEBUG nova.virt.hardware [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 760.583244] env[69994]: DEBUG nova.virt.hardware [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.583398] env[69994]: DEBUG nova.virt.hardware [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 760.583545] env[69994]: DEBUG nova.virt.hardware [None 
req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 760.583755] env[69994]: DEBUG nova.virt.hardware [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 760.583915] env[69994]: DEBUG nova.virt.hardware [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 760.584596] env[69994]: DEBUG nova.virt.hardware [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 760.584596] env[69994]: DEBUG nova.virt.hardware [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 760.584681] env[69994]: DEBUG nova.virt.hardware [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 760.585719] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8fbe42-17f0-4771-99c3-17fdc52149d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.595423] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde33811-3a02-4fc5-bedc-a99ce72ed004 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.824367] env[69994]: DEBUG nova.compute.manager [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 760.927984] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.989694] env[69994]: DEBUG nova.network.neutron [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Successfully updated port: 2523e77a-138d-4684-9f91-02c4e2e85f27 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 761.004075] env[69994]: DEBUG nova.compute.manager [req-95863e55-1ebd-471a-b80b-73dd47a172d0 req-4f3118c6-9939-4ddc-b7d5-d4a8b31385f9 service nova] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Received event network-vif-plugged-2523e77a-138d-4684-9f91-02c4e2e85f27 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 761.004322] env[69994]: DEBUG oslo_concurrency.lockutils [req-95863e55-1ebd-471a-b80b-73dd47a172d0 req-4f3118c6-9939-4ddc-b7d5-d4a8b31385f9 service nova] Acquiring lock "6fb97a65-bf0b-4e79-9611-f0f3179661b5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.004547] env[69994]: DEBUG oslo_concurrency.lockutils [req-95863e55-1ebd-471a-b80b-73dd47a172d0 req-4f3118c6-9939-4ddc-b7d5-d4a8b31385f9 service nova] Lock "6fb97a65-bf0b-4e79-9611-f0f3179661b5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.004737] env[69994]: DEBUG oslo_concurrency.lockutils [req-95863e55-1ebd-471a-b80b-73dd47a172d0 req-4f3118c6-9939-4ddc-b7d5-d4a8b31385f9 service nova] Lock "6fb97a65-bf0b-4e79-9611-f0f3179661b5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.004913] env[69994]: DEBUG nova.compute.manager [req-95863e55-1ebd-471a-b80b-73dd47a172d0 req-4f3118c6-9939-4ddc-b7d5-d4a8b31385f9 service nova] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] No waiting events found dispatching network-vif-plugged-2523e77a-138d-4684-9f91-02c4e2e85f27 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 761.006023] env[69994]: WARNING nova.compute.manager [req-95863e55-1ebd-471a-b80b-73dd47a172d0 req-4f3118c6-9939-4ddc-b7d5-d4a8b31385f9 service nova] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Received unexpected event network-vif-plugged-2523e77a-138d-4684-9f91-02c4e2e85f27 for instance with vm_state building and task_state spawning. 
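The records above show how port allocation and the Neutron external event interleave: allocate_for_instance() runs in the background while the build continues, and the network-vif-plugged event for port 2523e77a-138d-4684-9f91-02c4e2e85f27 arrives before anything is waiting for it, hence "No waiting events found" and the "Received unexpected event ... vm_state building and task_state spawning" warning. The sketch below only illustrates that background-allocate / wait-for-event shape with hypothetical allocate_port and plug_vif helpers; it is not Nova's implementation.

```python
import threading
from concurrent.futures import ThreadPoolExecutor

# One event object per (instance, event-name) pair, akin to the per-instance
# "-events" bookkeeping guarded by a lock in the records above.
_events: dict[tuple[str, str], threading.Event] = {}
_events_lock = threading.Lock()


def prepare_event(instance_id, name):
    with _events_lock:
        return _events.setdefault((instance_id, name), threading.Event())


def dispatch_event(instance_id, name):
    """Called when the external event arrives; if nobody prepared a waiter,
    this is the "Received unexpected event" case from the log."""
    with _events_lock:
        ev = _events.get((instance_id, name))
    if ev is None:
        print(f"No waiting events found dispatching {name} for {instance_id}")
        return
    ev.set()


def build_instance(instance_id, allocate_port, plug_vif):
    # Kick off network allocation in the background and keep building.
    with ThreadPoolExecutor(max_workers=1) as pool:
        future = pool.submit(allocate_port, instance_id)
        waiter = prepare_event(instance_id, "network-vif-plugged")
        port_id = future.result()          # block only once the port is needed
        plug_vif(instance_id, port_id)     # the external event arrives later
        if not waiter.wait(timeout=300):
            raise TimeoutError("network-vif-plugged never arrived")
        print(f"{instance_id}: vif {port_id} plugged, continuing spawn")


if __name__ == "__main__":
    def fake_allocate_port(instance_id):
        return "2523e77a-138d-4684-9f91-02c4e2e85f27"

    def fake_plug_vif(instance_id, port_id):
        # Simulate Neutron reporting the event shortly afterwards.
        threading.Timer(0.1, dispatch_event,
                        args=(instance_id, "network-vif-plugged")).start()

    build_instance("6fb97a65-bf0b-4e79-9611-f0f3179661b5",
                   fake_allocate_port, fake_plug_vif)
```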
[ 761.093022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ebf0445-ad79-4597-9f15-cd8b064d6a71 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "367665db-def4-4148-a316-b83378e00ba8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.446s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.363897] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.494808] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "refresh_cache-6fb97a65-bf0b-4e79-9611-f0f3179661b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.494808] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquired lock "refresh_cache-6fb97a65-bf0b-4e79-9611-f0f3179661b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.494982] env[69994]: DEBUG nova.network.neutron [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 761.975648] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "203bc0d6-c149-4c3d-9ac7-962210d6b01d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.975899] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "203bc0d6-c149-4c3d-9ac7-962210d6b01d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.986480] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6078933-a2ac-4bcf-b6b6-801942670dd5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.995199] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3925a8d0-8612-4a2b-80bc-e7df0c1c26ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
762.004200] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3209f85c-9db0-4467-a515-dc73386640c9 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.004433] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3209f85c-9db0-4467-a515-dc73386640c9 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.004604] env[69994]: DEBUG nova.compute.manager [None req-3209f85c-9db0-4467-a515-dc73386640c9 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 762.004859] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "67f5ad56-9455-43fc-b940-8a67974703cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.005085] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "67f5ad56-9455-43fc-b940-8a67974703cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.006083] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85cf9fd3-0026-4173-8892-3ed44db5c485 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.037484] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e63a21-868b-4341-8a91-ba7db5ddaeaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.042098] env[69994]: DEBUG nova.compute.manager [None req-3209f85c-9db0-4467-a515-dc73386640c9 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 762.042688] env[69994]: DEBUG nova.objects.instance [None req-3209f85c-9db0-4467-a515-dc73386640c9 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lazy-loading 'flavor' on Instance uuid a589ddb9-947b-4ff4-94f6-1fab4bdb874b {{(pid=69994) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 762.044480] env[69994]: DEBUG nova.network.neutron [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.051948] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdcea48-a90a-4aa6-9be6-ffe8fad8cdb1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.068974] env[69994]: DEBUG nova.compute.provider_tree [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.190556] env[69994]: DEBUG nova.network.neutron [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Updating instance_info_cache with network_info: [{"id": "2523e77a-138d-4684-9f91-02c4e2e85f27", "address": "fa:16:3e:06:2d:91", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2523e77a-13", "ovs_interfaceid": "2523e77a-138d-4684-9f91-02c4e2e85f27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.574030] env[69994]: DEBUG nova.scheduler.client.report [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 762.693481] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 
tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Releasing lock "refresh_cache-6fb97a65-bf0b-4e79-9611-f0f3179661b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.693996] env[69994]: DEBUG nova.compute.manager [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Instance network_info: |[{"id": "2523e77a-138d-4684-9f91-02c4e2e85f27", "address": "fa:16:3e:06:2d:91", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2523e77a-13", "ovs_interfaceid": "2523e77a-138d-4684-9f91-02c4e2e85f27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 762.694497] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:2d:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2523e77a-138d-4684-9f91-02c4e2e85f27', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 762.702099] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 762.702334] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 762.702566] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03b3fde6-bf33-40b5-a795-6e22b4581eda {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.725133] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 762.725133] env[69994]: value = "task-2925319" [ 762.725133] env[69994]: _type = "Task" [ 762.725133] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.733983] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925319, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.034299] env[69994]: DEBUG nova.compute.manager [req-edf6bf64-1f5e-4ced-9fab-b85bf720a785 req-d52707d6-1820-44e1-8682-ea5aa591e13d service nova] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Received event network-changed-2523e77a-138d-4684-9f91-02c4e2e85f27 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 763.034571] env[69994]: DEBUG nova.compute.manager [req-edf6bf64-1f5e-4ced-9fab-b85bf720a785 req-d52707d6-1820-44e1-8682-ea5aa591e13d service nova] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Refreshing instance network info cache due to event network-changed-2523e77a-138d-4684-9f91-02c4e2e85f27. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 763.034854] env[69994]: DEBUG oslo_concurrency.lockutils [req-edf6bf64-1f5e-4ced-9fab-b85bf720a785 req-d52707d6-1820-44e1-8682-ea5aa591e13d service nova] Acquiring lock "refresh_cache-6fb97a65-bf0b-4e79-9611-f0f3179661b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.035065] env[69994]: DEBUG oslo_concurrency.lockutils [req-edf6bf64-1f5e-4ced-9fab-b85bf720a785 req-d52707d6-1820-44e1-8682-ea5aa591e13d service nova] Acquired lock "refresh_cache-6fb97a65-bf0b-4e79-9611-f0f3179661b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.035279] env[69994]: DEBUG nova.network.neutron [req-edf6bf64-1f5e-4ced-9fab-b85bf720a785 req-d52707d6-1820-44e1-8682-ea5aa591e13d service nova] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Refreshing network info cache for port 2523e77a-138d-4684-9f91-02c4e2e85f27 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 763.052669] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3209f85c-9db0-4467-a515-dc73386640c9 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 763.053544] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0953ea57-c5bf-41d8-9a43-64da7a512425 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.062397] env[69994]: DEBUG oslo_vmware.api [None req-3209f85c-9db0-4467-a515-dc73386640c9 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 763.062397] env[69994]: value = "task-2925320" [ 763.062397] env[69994]: _type = "Task" [ 763.062397] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.073162] env[69994]: DEBUG oslo_vmware.api [None req-3209f85c-9db0-4467-a515-dc73386640c9 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925320, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.079140] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.079742] env[69994]: DEBUG nova.compute.manager [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 763.082626] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.850s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.082776] env[69994]: DEBUG nova.objects.instance [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 763.238406] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925319, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.572846] env[69994]: DEBUG oslo_vmware.api [None req-3209f85c-9db0-4467-a515-dc73386640c9 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925320, 'name': PowerOffVM_Task, 'duration_secs': 0.215732} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.573169] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3209f85c-9db0-4467-a515-dc73386640c9 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 763.573378] env[69994]: DEBUG nova.compute.manager [None req-3209f85c-9db0-4467-a515-dc73386640c9 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 763.574185] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc19425-1a67-449d-9f79-e4889e805d27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.587234] env[69994]: DEBUG nova.compute.utils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 763.591378] env[69994]: DEBUG nova.compute.manager [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 763.591533] env[69994]: DEBUG nova.network.neutron [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 763.678633] env[69994]: DEBUG nova.policy [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f866b55f082141239e62c9437c5db8be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f632b166593c4f6bb1d6e8b795f9e2e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 763.749295] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925319, 'name': CreateVM_Task, 'duration_secs': 0.703541} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.749472] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 763.752252] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.752466] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.752786] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 763.753083] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccb69f98-e229-4033-a7e0-1cf2e508434d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.759018] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 763.759018] env[69994]: value = 
"session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e5bf29-667c-c587-ac90-844e43df5bc8" [ 763.759018] env[69994]: _type = "Task" [ 763.759018] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.771800] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e5bf29-667c-c587-ac90-844e43df5bc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.924514] env[69994]: DEBUG nova.network.neutron [req-edf6bf64-1f5e-4ced-9fab-b85bf720a785 req-d52707d6-1820-44e1-8682-ea5aa591e13d service nova] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Updated VIF entry in instance network info cache for port 2523e77a-138d-4684-9f91-02c4e2e85f27. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 763.924906] env[69994]: DEBUG nova.network.neutron [req-edf6bf64-1f5e-4ced-9fab-b85bf720a785 req-d52707d6-1820-44e1-8682-ea5aa591e13d service nova] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Updating instance_info_cache with network_info: [{"id": "2523e77a-138d-4684-9f91-02c4e2e85f27", "address": "fa:16:3e:06:2d:91", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2523e77a-13", "ovs_interfaceid": "2523e77a-138d-4684-9f91-02c4e2e85f27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.088494] env[69994]: DEBUG nova.network.neutron [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Successfully created port: 9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 764.091476] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3209f85c-9db0-4467-a515-dc73386640c9 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.087s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.093572] env[69994]: DEBUG nova.compute.manager [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa 
tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 764.098625] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b7984d45-9ff2-4a06-84eb-1828dccb116d tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.099853] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.733s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.101842] env[69994]: INFO nova.compute.claims [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 764.274637] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e5bf29-667c-c587-ac90-844e43df5bc8, 'name': SearchDatastore_Task, 'duration_secs': 0.016438} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.275190] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.275616] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 764.276061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.276405] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.276777] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.277226] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d774f0b-fcd1-48b9-b6d9-be0647af9800 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.289800] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.289800] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 764.291205] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e35f762-97c1-442d-9d60-4c65096b1b94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.302266] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 764.302266] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d709fe-587b-fdde-2a76-e5ea135eb46f" [ 764.302266] env[69994]: _type = "Task" [ 764.302266] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.316040] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d709fe-587b-fdde-2a76-e5ea135eb46f, 'name': SearchDatastore_Task, 'duration_secs': 0.010982} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.323920] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc3515bf-0d0c-4218-a8af-df449a744edc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.329906] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 764.329906] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524f6ff8-feb7-6918-8ade-8a04e338eb0c" [ 764.329906] env[69994]: _type = "Task" [ 764.329906] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.338244] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524f6ff8-feb7-6918-8ade-8a04e338eb0c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.428227] env[69994]: DEBUG oslo_concurrency.lockutils [req-edf6bf64-1f5e-4ced-9fab-b85bf720a785 req-d52707d6-1820-44e1-8682-ea5aa591e13d service nova] Releasing lock "refresh_cache-6fb97a65-bf0b-4e79-9611-f0f3179661b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.533998] env[69994]: DEBUG nova.objects.instance [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lazy-loading 'flavor' on Instance uuid a589ddb9-947b-4ff4-94f6-1fab4bdb874b {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 764.840237] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524f6ff8-feb7-6918-8ade-8a04e338eb0c, 'name': SearchDatastore_Task, 'duration_secs': 0.012298} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.840237] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.840237] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 6fb97a65-bf0b-4e79-9611-f0f3179661b5/6fb97a65-bf0b-4e79-9611-f0f3179661b5.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 764.840237] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8254ab27-7f86-4ad1-9bda-2b3b146a8420 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.848673] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 764.848673] env[69994]: value = "task-2925321" [ 764.848673] env[69994]: _type = "Task" [ 764.848673] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.860304] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925321, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.039563] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "refresh_cache-a589ddb9-947b-4ff4-94f6-1fab4bdb874b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.039724] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquired lock "refresh_cache-a589ddb9-947b-4ff4-94f6-1fab4bdb874b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.039935] env[69994]: DEBUG nova.network.neutron [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 765.040182] env[69994]: DEBUG nova.objects.instance [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lazy-loading 'info_cache' on Instance uuid a589ddb9-947b-4ff4-94f6-1fab4bdb874b {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 765.112505] env[69994]: DEBUG nova.compute.manager [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 765.142658] env[69994]: DEBUG nova.virt.hardware [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 765.142983] env[69994]: DEBUG nova.virt.hardware [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.143195] env[69994]: DEBUG nova.virt.hardware [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 765.143400] env[69994]: DEBUG nova.virt.hardware [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.143601] env[69994]: DEBUG nova.virt.hardware [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 765.143751] env[69994]: DEBUG nova.virt.hardware [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 765.143966] env[69994]: DEBUG nova.virt.hardware [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 765.144151] env[69994]: DEBUG nova.virt.hardware [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 765.144323] env[69994]: DEBUG nova.virt.hardware [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 765.144513] env[69994]: DEBUG nova.virt.hardware [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 765.144770] env[69994]: DEBUG nova.virt.hardware [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 765.145880] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e78be9-2796-4288-9f5b-a018a83b90ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.159188] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89984c00-388d-41b3-b94a-485724d6ac57 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.362203] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925321, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48779} completed successfully. 
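Note: the nova.virt.hardware lines above enumerate candidate (sockets, cores, threads) triples whose product equals the vCPU count and which fit within the (here effectively unbounded, 65536) limits, then sort them by preference. A toy re-implementation of that enumeration, enough to reproduce the 1 vCPU -> (1, 1, 1) result logged above; the real Nova code additionally handles NUMA constraints and preferred topologies:

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) triples with sockets*cores*threads == vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                yield (sockets, cores, threads)

print(list(possible_cpu_topologies(1)))   # [(1, 1, 1)] -- matches the log above
print(list(possible_cpu_topologies(4)))   # e.g. (1, 1, 4), (1, 2, 2), (2, 2, 1), ...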
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.362769] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 6fb97a65-bf0b-4e79-9611-f0f3179661b5/6fb97a65-bf0b-4e79-9611-f0f3179661b5.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 765.362906] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 765.363602] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ea4837d-222f-41a0-95b4-6c233f08b2c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.375575] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 765.375575] env[69994]: value = "task-2925322" [ 765.375575] env[69994]: _type = "Task" [ 765.375575] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.388948] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925322, 'name': ExtendVirtualDisk_Task} progress is 0%. 
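Note: the sequence from the SearchDatastore_Task entries through CopyVirtualDisk_Task and ExtendVirtualDisk_Task is the image-cache spawn path: check that the image is already cached on the datastore, copy the cached VMDK into the instance's directory, then extend the root disk to the flavor size (the logged 1048576 appears to be root_gb=1 expressed in KiB). A schematic sketch of that orchestration; the three task-submitting callables are placeholders standing in for the datastore-browser and VirtualDiskManager tasks, not real oslo.vmware calls:

def prepare_root_disk(search_datastore, copy_virtual_disk, extend_virtual_disk,
                      cache_path, instance_path, root_gb):
    # search_datastore / copy_virtual_disk / extend_virtual_disk stand in for
    # the SearchDatastore_Task, CopyVirtualDisk_Task and ExtendVirtualDisk_Task
    # invocations seen in the log above.
    if not search_datastore(cache_path):
        raise RuntimeError("image not present in cache: %s" % cache_path)
    copy_virtual_disk(cache_path, instance_path)    # CopyVirtualDisk_Task
    root_kb = root_gb * 1024 * 1024                 # 1 GB -> 1048576, as logged
    extend_virtual_disk(instance_path, root_kb)     # ExtendVirtualDisk_Task
    return instance_path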
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.533026] env[69994]: DEBUG nova.compute.manager [req-2240e3b6-3f5b-472d-8c39-be942f2e7407 req-36470b6b-f408-45b1-aa0f-c022c3f036cb service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Received event network-vif-plugged-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 765.533259] env[69994]: DEBUG oslo_concurrency.lockutils [req-2240e3b6-3f5b-472d-8c39-be942f2e7407 req-36470b6b-f408-45b1-aa0f-c022c3f036cb service nova] Acquiring lock "9b6aca3c-337b-4067-80e0-487d956fabc7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.533445] env[69994]: DEBUG oslo_concurrency.lockutils [req-2240e3b6-3f5b-472d-8c39-be942f2e7407 req-36470b6b-f408-45b1-aa0f-c022c3f036cb service nova] Lock "9b6aca3c-337b-4067-80e0-487d956fabc7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.533617] env[69994]: DEBUG oslo_concurrency.lockutils [req-2240e3b6-3f5b-472d-8c39-be942f2e7407 req-36470b6b-f408-45b1-aa0f-c022c3f036cb service nova] Lock "9b6aca3c-337b-4067-80e0-487d956fabc7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.533782] env[69994]: DEBUG nova.compute.manager [req-2240e3b6-3f5b-472d-8c39-be942f2e7407 req-36470b6b-f408-45b1-aa0f-c022c3f036cb service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] No waiting events found dispatching network-vif-plugged-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 765.533946] env[69994]: WARNING nova.compute.manager [req-2240e3b6-3f5b-472d-8c39-be942f2e7407 req-36470b6b-f408-45b1-aa0f-c022c3f036cb service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Received unexpected event network-vif-plugged-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 for instance with vm_state building and task_state spawning. 
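Note: the "-events" lock together with the "No waiting events found dispatching ..." / "Received unexpected event ..." pair above reflects the pattern where the spawn path registers the external events it intends to wait for, and the external-event handler pops a matching waiter if one exists; when nothing was registered, the event is logged as unexpected. A condensed sketch of such a registry (a plain dict plus a lock, not Nova's actual InstanceEvents class):

import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}           # (instance_uuid, event_name) -> threading.Event
        self._lock = threading.Lock()

    def prepare_for_event(self, instance_uuid, event_name):
        # Called by the spawn path before triggering the action that will
        # eventually produce the external event (e.g. plugging a VIF).
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        # Called by the external-event handler (the req-... "service nova" context).
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print("No waiting events found dispatching %s" % event_name)
            return False
        waiter.set()
        return True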
[ 765.543149] env[69994]: DEBUG nova.objects.base [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 765.579272] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6c7f4f-ac4b-4e61-acca-2a217a5eed7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.588145] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9a1e56-6c87-449a-8cf3-2a5e685b3eb9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.620422] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef26937-19e7-43d2-bcf8-e5fa1f55c9a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.624648] env[69994]: DEBUG nova.network.neutron [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Successfully updated port: 9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 765.634627] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d399727-a16f-441b-b9a8-21356741af5c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.652238] env[69994]: DEBUG nova.compute.provider_tree [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 765.885608] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925322, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077568} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.885760] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 765.886627] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd52079-b3ed-4a3f-b306-a5965d46d22f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.909922] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] 6fb97a65-bf0b-4e79-9611-f0f3179661b5/6fb97a65-bf0b-4e79-9611-f0f3179661b5.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 765.910245] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e01fb88-9a17-4093-80f5-1fb9ccc430dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.937032] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 765.937032] env[69994]: value = "task-2925323" [ 765.937032] env[69994]: _type = "Task" [ 765.937032] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.946131] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925323, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.129523] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.129810] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquired lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.129810] env[69994]: DEBUG nova.network.neutron [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 766.174275] env[69994]: ERROR nova.scheduler.client.report [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [req-25ee08e1-e69a-4163-8e5e-056c8bf8be20] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-25ee08e1-e69a-4163-8e5e-056c8bf8be20"}]} [ 766.190984] env[69994]: DEBUG nova.scheduler.client.report [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 766.205847] env[69994]: DEBUG nova.scheduler.client.report [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 766.206099] env[69994]: DEBUG nova.compute.provider_tree [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 766.218133] env[69994]: DEBUG nova.scheduler.client.report [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 766.236291] env[69994]: DEBUG nova.scheduler.client.report [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 766.265645] env[69994]: DEBUG nova.network.neutron [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Updating instance_info_cache with network_info: [{"id": "77322171-a52e-49ee-a04c-5aecaebff021", "address": "fa:16:3e:4c:a0:a8", "network": {"id": "4a44e17e-7aac-4a89-abb5-0038f92e41fb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-226468376-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fe28fa02f104475980a34f35dc815a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77322171-a5", "ovs_interfaceid": "77322171-a52e-49ee-a04c-5aecaebff021", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.451146] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.671776] env[69994]: DEBUG nova.network.neutron [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 766.687743] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3992b63-d244-420d-a319-05948c43c610 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.698239] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5ef501-8d46-411e-80ef-9fdf81ae700e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.732058] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920610eb-49dc-40de-a623-e64661c6245d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.740430] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe8e250-2125-40b7-9e66-10a4c3da78f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.755325] env[69994]: DEBUG nova.compute.provider_tree [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 766.767327] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Releasing lock "refresh_cache-a589ddb9-947b-4ff4-94f6-1fab4bdb874b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.853456] env[69994]: DEBUG nova.network.neutron [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Updating instance_info_cache with network_info: [{"id": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "address": "fa:16:3e:12:51:b9", "network": {"id": "a6b07358-99ca-488f-b73e-8f1cbcdfe80d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-754407706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f632b166593c4f6bb1d6e8b795f9e2e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e029825-6c65-4ac7-88f6-65f9d106db76", "external-id": "nsx-vlan-transportzone-428", "segmentation_id": 428, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd8099f-e3", "ovs_interfaceid": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.951186] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925323, 'name': ReconfigVM_Task, 'duration_secs': 0.703618} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.951496] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Reconfigured VM instance instance-00000024 to attach disk [datastore2] 6fb97a65-bf0b-4e79-9611-f0f3179661b5/6fb97a65-bf0b-4e79-9611-f0f3179661b5.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 766.952160] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-538f95af-4789-477b-b13f-79ef9124ee80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.959482] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 766.959482] env[69994]: value = "task-2925324" [ 766.959482] env[69994]: _type = "Task" [ 766.959482] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.968459] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925324, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.287792] env[69994]: DEBUG nova.scheduler.client.report [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 60 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 767.288080] env[69994]: DEBUG nova.compute.provider_tree [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 60 to 61 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 767.288318] env[69994]: DEBUG nova.compute.provider_tree [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 767.356257] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Releasing lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.356599] env[69994]: DEBUG nova.compute.manager [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Instance network_info: |[{"id": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "address": "fa:16:3e:12:51:b9", "network": {"id": "a6b07358-99ca-488f-b73e-8f1cbcdfe80d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-754407706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f632b166593c4f6bb1d6e8b795f9e2e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e029825-6c65-4ac7-88f6-65f9d106db76", "external-id": "nsx-vlan-transportzone-428", "segmentation_id": 428, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd8099f-e3", "ovs_interfaceid": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 767.356960] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:51:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1e029825-6c65-4ac7-88f6-65f9d106db76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 767.364792] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Creating folder: Project (f632b166593c4f6bb1d6e8b795f9e2e6). Parent ref: group-v587342. 
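Note: the "Instance VIF info" line above shows what the spawn path derives from the Neutron network_info for the vmwareapi driver: for an NSX-backed port, the network_ref becomes an OpaqueNetwork keyed by the port's nsx-logical-switch-id. A small sketch of that mapping, reproducing the dict logged above from the VIF entry logged earlier; this is simplified, and the real driver also covers DVS port groups and other VIF types:

def vif_info_for_nsx_port(vif, bridge="br-int", vif_model="vmxnet3"):
    # Map a Neutron network_info VIF entry onto the structure the vmwareapi
    # driver uses for VM creation (OpaqueNetwork / nsx.LogicalSwitch case).
    return {
        "network_name": bridge,
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }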
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 767.365132] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0c6e558-ff30-4ed4-b4c5-df15a8d5d481 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.377955] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Created folder: Project (f632b166593c4f6bb1d6e8b795f9e2e6) in parent group-v587342. [ 767.378186] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Creating folder: Instances. Parent ref: group-v587453. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 767.378438] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77d780bb-8ea4-4f11-bf2f-d97f73cbd927 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.392534] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Created folder: Instances in parent group-v587453. [ 767.392534] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 767.392669] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 767.392890] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cfecd731-6b95-448b-b4a6-2feaa43a9c0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.418900] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 767.418900] env[69994]: value = "task-2925327" [ 767.418900] env[69994]: _type = "Task" [ 767.418900] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.427490] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925327, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.469447] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925324, 'name': Rename_Task, 'duration_secs': 0.14638} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.469739] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 767.470019] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc99e094-9a34-4be2-9d6f-95e78417bb32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.477990] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 767.477990] env[69994]: value = "task-2925328" [ 767.477990] env[69994]: _type = "Task" [ 767.477990] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.486247] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925328, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.562312] env[69994]: DEBUG nova.compute.manager [req-57afd4bd-b537-446e-be8d-00c609eab100 req-4fbfb621-d9ae-437a-813f-3adb2a5db64b service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Received event network-changed-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 767.562521] env[69994]: DEBUG nova.compute.manager [req-57afd4bd-b537-446e-be8d-00c609eab100 req-4fbfb621-d9ae-437a-813f-3adb2a5db64b service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Refreshing instance network info cache due to event network-changed-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 767.562763] env[69994]: DEBUG oslo_concurrency.lockutils [req-57afd4bd-b537-446e-be8d-00c609eab100 req-4fbfb621-d9ae-437a-813f-3adb2a5db64b service nova] Acquiring lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.562905] env[69994]: DEBUG oslo_concurrency.lockutils [req-57afd4bd-b537-446e-be8d-00c609eab100 req-4fbfb621-d9ae-437a-813f-3adb2a5db64b service nova] Acquired lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.563295] env[69994]: DEBUG nova.network.neutron [req-57afd4bd-b537-446e-be8d-00c609eab100 req-4fbfb621-d9ae-437a-813f-3adb2a5db64b service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Refreshing network info cache for port 9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 767.772272] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 767.772657] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2dd881bc-caef-4ecc-b2ef-0a8ed18db14e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.780526] env[69994]: DEBUG oslo_vmware.api [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 767.780526] env[69994]: value = "task-2925329" [ 767.780526] env[69994]: _type = "Task" [ 767.780526] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.795103] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.695s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.795658] env[69994]: DEBUG nova.compute.manager [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 767.798222] env[69994]: DEBUG oslo_vmware.api [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925329, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.798969] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.900s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.798969] env[69994]: DEBUG nova.objects.instance [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Lazy-loading 'resources' on Instance uuid f36c29d1-b945-4afe-abbd-431e59de7cec {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 767.930234] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925327, 'name': CreateVM_Task, 'duration_secs': 0.378625} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.930464] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 767.931079] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.931264] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.931597] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 767.931863] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6f4f0a7-37d3-4f37-9346-dd7062d69763 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.937558] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 767.937558] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dbbde2-2f84-b9b1-abc9-1fd53a37a91d" [ 767.937558] env[69994]: _type = "Task" [ 767.937558] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.946527] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dbbde2-2f84-b9b1-abc9-1fd53a37a91d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.987877] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925328, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.292849] env[69994]: DEBUG oslo_vmware.api [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925329, 'name': PowerOnVM_Task, 'duration_secs': 0.45105} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.292849] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 768.293269] env[69994]: DEBUG nova.compute.manager [None req-3ee07388-e46d-40a5-88a1-4326f72bafc2 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 768.293718] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939c2233-3ef3-43b3-8111-bcaec5381cdb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.297893] env[69994]: DEBUG nova.network.neutron [req-57afd4bd-b537-446e-be8d-00c609eab100 req-4fbfb621-d9ae-437a-813f-3adb2a5db64b service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Updated VIF entry in instance network info cache for port 9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 768.298227] env[69994]: DEBUG nova.network.neutron [req-57afd4bd-b537-446e-be8d-00c609eab100 req-4fbfb621-d9ae-437a-813f-3adb2a5db64b service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Updating instance_info_cache with network_info: [{"id": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "address": "fa:16:3e:12:51:b9", "network": {"id": "a6b07358-99ca-488f-b73e-8f1cbcdfe80d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-754407706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f632b166593c4f6bb1d6e8b795f9e2e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e029825-6c65-4ac7-88f6-65f9d106db76", "external-id": "nsx-vlan-transportzone-428", "segmentation_id": 428, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd8099f-e3", "ovs_interfaceid": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.300278] env[69994]: DEBUG nova.compute.utils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 768.304684] env[69994]: DEBUG nova.compute.manager [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 768.305131] env[69994]: DEBUG nova.network.neutron [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 768.359983] env[69994]: DEBUG nova.policy [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '550fe2bfeab14f0fa409c65d98954e7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21bf4c6f3b2c45218949b0e6c1eb84fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 768.456560] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dbbde2-2f84-b9b1-abc9-1fd53a37a91d, 'name': SearchDatastore_Task, 'duration_secs': 0.011248} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.456937] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.457234] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.457482] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.457630] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.457809] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 
tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 768.458090] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26eaceae-b5d6-49c6-9344-a7084500ed0d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.466979] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.467107] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 768.468024] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-220897df-56f8-42ce-ab06-066e0c5dae84 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.473533] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 768.473533] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b47dd8-b1cd-4464-e8eb-d26d4f175443" [ 768.473533] env[69994]: _type = "Task" [ 768.473533] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.491447] env[69994]: DEBUG oslo_vmware.api [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925328, 'name': PowerOnVM_Task, 'duration_secs': 0.598677} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.498168] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 768.498502] env[69994]: INFO nova.compute.manager [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Took 7.94 seconds to spawn the instance on the hypervisor. 
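
Note: the CreateVM_Task, CopyVirtualDisk_Task and PowerOnVM_Task entries above all follow oslo.vmware's asynchronous task pattern — the driver invokes a *_Task method against vCenter, then wait_for_task polls the task object until vSphere reports success, which is what produces the repeated "Waiting for the task" and "progress is N%" lines. Below is a minimal illustrative sketch of that pattern using oslo.vmware directly; the vCenter host, credentials and the VM lookup are hypothetical placeholders, not values taken from this log.

    # Illustrative sketch only; host, credentials and the VM lookup are
    # hypothetical and do not come from the log above.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # task_poll_interval drives how often the "progress is N%" polls happen.
    session = vmware_api.VMwareAPISession(
        'vc.example.test',                  # hypothetical vCenter host
        'administrator@vsphere.local',      # hypothetical user
        'secret',                           # hypothetical password
        api_retry_count=10,
        task_poll_interval=0.5)

    # Fetch some VirtualMachine managed-object reference to operate on.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100)
    vm_ref = result.objects[0].obj

    # Start an asynchronous vSphere task (analogous to PowerOnVM_Task above).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Blocks while polling the task, logging progress, and raises if
    # vSphere marks the task as failed.
    session.wait_for_task(task)
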
[ 768.499372] env[69994]: DEBUG nova.compute.manager [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 768.499372] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b47dd8-b1cd-4464-e8eb-d26d4f175443, 'name': SearchDatastore_Task, 'duration_secs': 0.009762} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.500237] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1d278b-fcff-44cc-a98d-d4e38c7cb22c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.503028] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dbb6f32-4c36-4737-b925-29ab69fed620 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.512977] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 768.512977] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b763bb-cfe7-e8c9-77c6-5777dea4299f" [ 768.512977] env[69994]: _type = "Task" [ 768.512977] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.528422] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b763bb-cfe7-e8c9-77c6-5777dea4299f, 'name': SearchDatastore_Task, 'duration_secs': 0.011298} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.529620] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.529620] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 9b6aca3c-337b-4067-80e0-487d956fabc7/9b6aca3c-337b-4067-80e0-487d956fabc7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 768.529620] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36eade9a-53d7-4bf5-990b-a8aff7a15768 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.539369] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 768.539369] env[69994]: value = "task-2925330" [ 768.539369] env[69994]: _type = "Task" [ 768.539369] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.551647] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925330, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.724604] env[69994]: DEBUG nova.network.neutron [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Successfully created port: f3fc1328-25fb-4ac4-ab6e-c522b2ccc666 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 768.792012] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d38d5aa-a614-4d7f-98b1-25cfa9ffda98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.801335] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec48a85-4a7f-478e-ba81-5acf85f56e9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.805704] env[69994]: DEBUG oslo_concurrency.lockutils [req-57afd4bd-b537-446e-be8d-00c609eab100 req-4fbfb621-d9ae-437a-813f-3adb2a5db64b service nova] Releasing lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.810380] env[69994]: DEBUG nova.compute.manager [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 768.840845] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0bd8da-e333-484e-ae63-5c4209d3acac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.850975] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f755a0ba-a487-440d-a8f7-0b789d353905 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.868357] env[69994]: DEBUG nova.compute.provider_tree [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.028646] env[69994]: INFO nova.compute.manager [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Took 48.96 seconds to build instance. [ 769.051625] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925330, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488819} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.051625] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 9b6aca3c-337b-4067-80e0-487d956fabc7/9b6aca3c-337b-4067-80e0-487d956fabc7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 769.051625] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 769.052806] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6aba1a88-664f-4e96-bfeb-a64e21bdde8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.059226] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 769.059226] env[69994]: value = "task-2925331" [ 769.059226] env[69994]: _type = "Task" [ 769.059226] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.069358] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925331, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.371704] env[69994]: DEBUG nova.scheduler.client.report [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 769.532366] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5beef88d-7809-4cb8-a4b7-c7b8633bd7e5 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "6fb97a65-bf0b-4e79-9611-f0f3179661b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.429s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.570505] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925331, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086063} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.570865] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 769.572265] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8adbf1-79e0-4479-b466-285d11e3e01b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.595942] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] 9b6aca3c-337b-4067-80e0-487d956fabc7/9b6aca3c-337b-4067-80e0-487d956fabc7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 769.596288] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d95e17b5-b5af-426c-a388-05f06f82864c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.634055] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 769.634055] env[69994]: value = "task-2925332" [ 769.634055] env[69994]: _type = 
"Task" [ 769.634055] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.645783] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925332, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.848653] env[69994]: DEBUG nova.compute.manager [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 769.874699] env[69994]: DEBUG nova.virt.hardware [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 769.874984] env[69994]: DEBUG nova.virt.hardware [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 769.875191] env[69994]: DEBUG nova.virt.hardware [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 769.875388] env[69994]: DEBUG nova.virt.hardware [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 769.875557] env[69994]: DEBUG nova.virt.hardware [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 769.875709] env[69994]: DEBUG nova.virt.hardware [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 769.875920] env[69994]: DEBUG nova.virt.hardware [None 
req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 769.876125] env[69994]: DEBUG nova.virt.hardware [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 769.876384] env[69994]: DEBUG nova.virt.hardware [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 769.876600] env[69994]: DEBUG nova.virt.hardware [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 769.876781] env[69994]: DEBUG nova.virt.hardware [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 769.877582] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.079s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.880282] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3cb162-6b22-469a-b851-cc6a084aeaf5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.883290] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.071s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 769.884723] env[69994]: INFO nova.compute.claims [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.894144] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa062b8-7182-443b-ab59-706636b34551 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.909988] env[69994]: INFO nova.scheduler.client.report [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Deleted allocations for instance 
f36c29d1-b945-4afe-abbd-431e59de7cec [ 769.915700] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f08b92-9241-4058-92ba-f2f6f3307aeb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.923313] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d83fed74-6d6a-4203-975e-f1e711c96e39 tempest-ServersAdminNegativeTestJSON-780517593 tempest-ServersAdminNegativeTestJSON-780517593-project-admin] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Suspending the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 769.923574] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c4a0ac39-488b-4399-a095-ceae08dd105e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.932189] env[69994]: DEBUG oslo_vmware.api [None req-d83fed74-6d6a-4203-975e-f1e711c96e39 tempest-ServersAdminNegativeTestJSON-780517593 tempest-ServersAdminNegativeTestJSON-780517593-project-admin] Waiting for the task: (returnval){ [ 769.932189] env[69994]: value = "task-2925333" [ 769.932189] env[69994]: _type = "Task" [ 769.932189] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.944028] env[69994]: DEBUG oslo_vmware.api [None req-d83fed74-6d6a-4203-975e-f1e711c96e39 tempest-ServersAdminNegativeTestJSON-780517593 tempest-ServersAdminNegativeTestJSON-780517593-project-admin] Task: {'id': task-2925333, 'name': SuspendVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.035768] env[69994]: DEBUG nova.compute.manager [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 770.148790] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925332, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.419286] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb97785c-a88f-46c8-bc64-063dfb1cb3ce tempest-ServersListShow2100Test-589147335 tempest-ServersListShow2100Test-589147335-project-member] Lock "f36c29d1-b945-4afe-abbd-431e59de7cec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.766s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.421786] env[69994]: DEBUG nova.compute.manager [req-54feb5f9-44b6-460a-af34-84731e419358 req-75e86543-9b8e-48f0-8d6b-46d4d55e7059 service nova] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Received event network-vif-plugged-f3fc1328-25fb-4ac4-ab6e-c522b2ccc666 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 770.422035] env[69994]: DEBUG oslo_concurrency.lockutils [req-54feb5f9-44b6-460a-af34-84731e419358 req-75e86543-9b8e-48f0-8d6b-46d4d55e7059 service nova] Acquiring lock "00ab07b7-e7ed-4a71-b684-d5af8b1b7616-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.422288] env[69994]: DEBUG oslo_concurrency.lockutils [req-54feb5f9-44b6-460a-af34-84731e419358 req-75e86543-9b8e-48f0-8d6b-46d4d55e7059 service nova] Lock "00ab07b7-e7ed-4a71-b684-d5af8b1b7616-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.422454] env[69994]: DEBUG oslo_concurrency.lockutils [req-54feb5f9-44b6-460a-af34-84731e419358 req-75e86543-9b8e-48f0-8d6b-46d4d55e7059 service nova] Lock "00ab07b7-e7ed-4a71-b684-d5af8b1b7616-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.422639] env[69994]: DEBUG nova.compute.manager [req-54feb5f9-44b6-460a-af34-84731e419358 req-75e86543-9b8e-48f0-8d6b-46d4d55e7059 service nova] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] No waiting events found dispatching network-vif-plugged-f3fc1328-25fb-4ac4-ab6e-c522b2ccc666 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 770.422823] env[69994]: WARNING nova.compute.manager [req-54feb5f9-44b6-460a-af34-84731e419358 req-75e86543-9b8e-48f0-8d6b-46d4d55e7059 service nova] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Received unexpected event network-vif-plugged-f3fc1328-25fb-4ac4-ab6e-c522b2ccc666 for instance with vm_state building and task_state spawning. [ 770.448468] env[69994]: DEBUG oslo_vmware.api [None req-d83fed74-6d6a-4203-975e-f1e711c96e39 tempest-ServersAdminNegativeTestJSON-780517593 tempest-ServersAdminNegativeTestJSON-780517593-project-admin] Task: {'id': task-2925333, 'name': SuspendVM_Task} progress is 45%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.535964] env[69994]: DEBUG nova.network.neutron [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Successfully updated port: f3fc1328-25fb-4ac4-ab6e-c522b2ccc666 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 770.564715] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.646594] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925332, 'name': ReconfigVM_Task, 'duration_secs': 0.926705} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.646897] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Reconfigured VM instance instance-00000025 to attach disk [datastore2] 9b6aca3c-337b-4067-80e0-487d956fabc7/9b6aca3c-337b-4067-80e0-487d956fabc7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 770.647564] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ae2f6d1-9c8d-4ad7-98ee-da80a900877f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.656310] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 770.656310] env[69994]: value = "task-2925334" [ 770.656310] env[69994]: _type = "Task" [ 770.656310] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.666467] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925334, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.955450] env[69994]: DEBUG oslo_vmware.api [None req-d83fed74-6d6a-4203-975e-f1e711c96e39 tempest-ServersAdminNegativeTestJSON-780517593 tempest-ServersAdminNegativeTestJSON-780517593-project-admin] Task: {'id': task-2925333, 'name': SuspendVM_Task, 'duration_secs': 0.789072} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.955731] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d83fed74-6d6a-4203-975e-f1e711c96e39 tempest-ServersAdminNegativeTestJSON-780517593 tempest-ServersAdminNegativeTestJSON-780517593-project-admin] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Suspended the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 770.955910] env[69994]: DEBUG nova.compute.manager [None req-d83fed74-6d6a-4203-975e-f1e711c96e39 tempest-ServersAdminNegativeTestJSON-780517593 tempest-ServersAdminNegativeTestJSON-780517593-project-admin] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 770.956729] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98765220-96bb-4af6-8179-a7c9ca5120d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.005153] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "180b4236-289c-4818-885d-c66e9e9a2ea8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.005429] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "180b4236-289c-4818-885d-c66e9e9a2ea8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.005630] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "180b4236-289c-4818-885d-c66e9e9a2ea8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.005807] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "180b4236-289c-4818-885d-c66e9e9a2ea8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.006033] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "180b4236-289c-4818-885d-c66e9e9a2ea8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.008766] env[69994]: INFO nova.compute.manager [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 
tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Terminating instance [ 771.039441] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "refresh_cache-00ab07b7-e7ed-4a71-b684-d5af8b1b7616" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.039609] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "refresh_cache-00ab07b7-e7ed-4a71-b684-d5af8b1b7616" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.040409] env[69994]: DEBUG nova.network.neutron [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 771.172315] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925334, 'name': Rename_Task, 'duration_secs': 0.20067} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.172664] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 771.173327] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b759ac55-9091-497e-a9a9-edc2c1a0d337 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.181526] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 771.181526] env[69994]: value = "task-2925335" [ 771.181526] env[69994]: _type = "Task" [ 771.181526] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.190886] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925335, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.329858] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c04bade-a37c-4b8e-871f-00f8d0eee509 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.339014] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7cdc55b-1422-4805-b966-f04f0175790a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.376566] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599ebf24-7040-4fd5-8c4b-9e39a02c148f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.385417] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49bd929c-518a-47a7-9033-57034190a814 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.400130] env[69994]: DEBUG nova.compute.provider_tree [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.519637] env[69994]: DEBUG nova.compute.manager [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 771.519637] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 771.519637] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb90740-85ef-492d-8ee4-81689a8fa98c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.527993] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 771.528260] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3eb202ea-960d-4ef4-9a56-afa2d9b421be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.536498] env[69994]: DEBUG oslo_vmware.api [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 771.536498] env[69994]: value = "task-2925336" [ 771.536498] env[69994]: _type = "Task" [ 771.536498] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.548058] env[69994]: DEBUG oslo_vmware.api [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925336, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.579091] env[69994]: DEBUG nova.network.neutron [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.692893] env[69994]: DEBUG oslo_vmware.api [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925335, 'name': PowerOnVM_Task, 'duration_secs': 0.48779} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.693618] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 771.693618] env[69994]: INFO nova.compute.manager [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Took 6.58 seconds to spawn the instance on the hypervisor. [ 771.693618] env[69994]: DEBUG nova.compute.manager [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 771.694476] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698dd910-bf29-4575-9689-8b857dd68488 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.800436] env[69994]: DEBUG nova.network.neutron [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Updating instance_info_cache with network_info: [{"id": "f3fc1328-25fb-4ac4-ab6e-c522b2ccc666", "address": "fa:16:3e:a6:34:67", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3fc1328-25", "ovs_interfaceid": "f3fc1328-25fb-4ac4-ab6e-c522b2ccc666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.903709] env[69994]: DEBUG nova.scheduler.client.report [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 772.050404] env[69994]: DEBUG oslo_vmware.api [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925336, 'name': PowerOffVM_Task, 'duration_secs': 0.247103} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.050753] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 772.050974] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 772.051643] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f09650ee-ddab-471a-8885-5ce8d1ebdadd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.127675] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 772.127924] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 772.128305] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Deleting the datastore file [datastore2] 180b4236-289c-4818-885d-c66e9e9a2ea8 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 772.128401] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46b4b553-29a9-4d4e-8a91-f8ffa84b9094 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.138226] env[69994]: DEBUG oslo_vmware.api [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 772.138226] env[69994]: value = "task-2925338" [ 772.138226] env[69994]: _type = "Task" [ 772.138226] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.146937] env[69994]: DEBUG oslo_vmware.api [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925338, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.217878] env[69994]: INFO nova.compute.manager [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Took 47.89 seconds to build instance. [ 772.302951] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "refresh_cache-00ab07b7-e7ed-4a71-b684-d5af8b1b7616" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.303481] env[69994]: DEBUG nova.compute.manager [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Instance network_info: |[{"id": "f3fc1328-25fb-4ac4-ab6e-c522b2ccc666", "address": "fa:16:3e:a6:34:67", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3fc1328-25", "ovs_interfaceid": "f3fc1328-25fb-4ac4-ab6e-c522b2ccc666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 772.304094] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:34:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04e15990-16e1-4cb2-b0f0-06c362e68c5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3fc1328-25fb-4ac4-ab6e-c522b2ccc666', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 772.311627] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 772.311900] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 772.312097] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bbb884a0-eae4-4787-a4a4-e85c9e4abf0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.332538] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 772.332538] env[69994]: value = "task-2925339" [ 772.332538] env[69994]: _type = "Task" [ 772.332538] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.341827] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925339, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.408679] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.525s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.409280] env[69994]: DEBUG nova.compute.manager [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 772.412086] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 33.034s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.455550] env[69994]: DEBUG nova.compute.manager [req-dce1ded8-fcd5-4ca4-96ae-cc39d16ea04e req-b59fda0e-8e0a-45b0-9db7-92ad83e72b0a service nova] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Received event network-changed-f3fc1328-25fb-4ac4-ab6e-c522b2ccc666 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 772.455765] env[69994]: DEBUG nova.compute.manager [req-dce1ded8-fcd5-4ca4-96ae-cc39d16ea04e req-b59fda0e-8e0a-45b0-9db7-92ad83e72b0a service nova] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Refreshing instance network info cache due to event network-changed-f3fc1328-25fb-4ac4-ab6e-c522b2ccc666. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 772.455982] env[69994]: DEBUG oslo_concurrency.lockutils [req-dce1ded8-fcd5-4ca4-96ae-cc39d16ea04e req-b59fda0e-8e0a-45b0-9db7-92ad83e72b0a service nova] Acquiring lock "refresh_cache-00ab07b7-e7ed-4a71-b684-d5af8b1b7616" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.456271] env[69994]: DEBUG oslo_concurrency.lockutils [req-dce1ded8-fcd5-4ca4-96ae-cc39d16ea04e req-b59fda0e-8e0a-45b0-9db7-92ad83e72b0a service nova] Acquired lock "refresh_cache-00ab07b7-e7ed-4a71-b684-d5af8b1b7616" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.456493] env[69994]: DEBUG nova.network.neutron [req-dce1ded8-fcd5-4ca4-96ae-cc39d16ea04e req-b59fda0e-8e0a-45b0-9db7-92ad83e72b0a service nova] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Refreshing network info cache for port f3fc1328-25fb-4ac4-ab6e-c522b2ccc666 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 772.649169] env[69994]: DEBUG oslo_vmware.api [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153407} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.649549] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 772.649685] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 772.649878] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 772.650081] env[69994]: INFO nova.compute.manager [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Took 1.13 seconds to destroy the instance on the hypervisor. [ 772.650335] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 772.650487] env[69994]: DEBUG nova.compute.manager [-] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 772.650578] env[69994]: DEBUG nova.network.neutron [-] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 772.721848] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1fb4abfa-adf7-4d70-ab36-9a6dcc6b86aa tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "9b6aca3c-337b-4067-80e0-487d956fabc7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.993s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.846306] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925339, 'name': CreateVM_Task, 'duration_secs': 0.414253} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.846497] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 772.847111] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.847280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.847634] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 772.847901] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a41b5bd7-037a-4686-a8ad-fa740dff747a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.853210] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 772.853210] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5208be96-75a1-58c8-7f59-741dca8a9c46" [ 772.853210] env[69994]: _type = "Task" [ 772.853210] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.862835] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5208be96-75a1-58c8-7f59-741dca8a9c46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.924297] env[69994]: DEBUG nova.compute.utils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 772.930114] env[69994]: DEBUG nova.compute.manager [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 772.930114] env[69994]: DEBUG nova.network.neutron [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 773.020138] env[69994]: DEBUG nova.policy [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b75a87a5b39f410eb61164ce3089b838', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f936106f0d2844d8844bea3f589aaab9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 773.224601] env[69994]: DEBUG nova.compute.manager [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 773.367198] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5208be96-75a1-58c8-7f59-741dca8a9c46, 'name': SearchDatastore_Task, 'duration_secs': 0.011631} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.367198] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.367198] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 773.367198] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.367198] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.367198] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 773.367198] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebe86e37-9d70-47ce-9fc9-80841cc47936 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.379161] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 773.379505] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 773.380428] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f184458-3ab3-406a-907f-d681fe6af000 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.388439] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 773.388439] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5273acdd-139e-3ee5-93bc-341c482e99c6" [ 773.388439] env[69994]: _type = "Task" [ 773.388439] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.398568] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5273acdd-139e-3ee5-93bc-341c482e99c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.432343] env[69994]: DEBUG nova.compute.manager [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 773.446730] env[69994]: DEBUG oslo_concurrency.lockutils [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "6fb97a65-bf0b-4e79-9611-f0f3179661b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.447394] env[69994]: DEBUG oslo_concurrency.lockutils [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "6fb97a65-bf0b-4e79-9611-f0f3179661b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.447394] env[69994]: DEBUG oslo_concurrency.lockutils [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "6fb97a65-bf0b-4e79-9611-f0f3179661b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.447522] env[69994]: DEBUG oslo_concurrency.lockutils [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "6fb97a65-bf0b-4e79-9611-f0f3179661b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
773.447838] env[69994]: DEBUG oslo_concurrency.lockutils [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "6fb97a65-bf0b-4e79-9611-f0f3179661b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.454287] env[69994]: INFO nova.compute.manager [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Terminating instance [ 773.464162] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 298a4d59-733f-4cda-a9c2-80dc21be91ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.464162] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 627f89ad-0381-4de9-a429-c74e26975ce9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.464162] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 75e952e7-6761-49a4-9193-175f5d30494e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.464162] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 9e9973e1-feb8-4fd7-95ae-e6d824af5a64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.464162] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.464162] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 6aacfc4e-32b4-40d7-8240-e4449cf78925 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.464162] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 63d6a59a-d58c-4179-ad39-eb9863e6f84c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.464162] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.464162] env[69994]: WARNING nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 1693ccdf-ea72-45d5-8b34-e2b0e155e528 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 773.464162] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance b00d09ea-5eee-47ed-adcb-288cdd362e89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.464162] env[69994]: WARNING nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 773.466593] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance e46b8a11-650a-4e34-bc4a-e1c1b2515e76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.466821] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance ab320e59-febb-4f8f-9bc4-74227d29c752 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.466959] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance a589ddb9-947b-4ff4-94f6-1fab4bdb874b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.467111] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance e0764e41-0810-45a1-8917-ac901f0f8321 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.467205] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 180b4236-289c-4818-885d-c66e9e9a2ea8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.467316] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 6fb97a65-bf0b-4e79-9611-f0f3179661b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.467424] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 9b6aca3c-337b-4067-80e0-487d956fabc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.467581] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 00ab07b7-e7ed-4a71-b684-d5af8b1b7616 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.467632] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 03a10403-0253-4df0-84b2-1e56f0c057fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 773.469130] env[69994]: DEBUG nova.network.neutron [-] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.678649] env[69994]: DEBUG nova.network.neutron [req-dce1ded8-fcd5-4ca4-96ae-cc39d16ea04e req-b59fda0e-8e0a-45b0-9db7-92ad83e72b0a service nova] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Updated VIF entry in instance network info cache for port f3fc1328-25fb-4ac4-ab6e-c522b2ccc666. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 773.679033] env[69994]: DEBUG nova.network.neutron [req-dce1ded8-fcd5-4ca4-96ae-cc39d16ea04e req-b59fda0e-8e0a-45b0-9db7-92ad83e72b0a service nova] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Updating instance_info_cache with network_info: [{"id": "f3fc1328-25fb-4ac4-ab6e-c522b2ccc666", "address": "fa:16:3e:a6:34:67", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3fc1328-25", "ovs_interfaceid": "f3fc1328-25fb-4ac4-ab6e-c522b2ccc666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.755286] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.871975] env[69994]: DEBUG nova.network.neutron [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Successfully created port: 522a6fbf-6853-4dc5-ba10-82f41421ee4a {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 773.900642] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5273acdd-139e-3ee5-93bc-341c482e99c6, 'name': SearchDatastore_Task, 'duration_secs': 0.011222} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.901624] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3437b490-8c5c-4a1f-bac0-69d2d5c21e05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.908501] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 773.908501] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5262713e-2f6c-9cef-d166-b1c344fd7b18" [ 773.908501] env[69994]: _type = "Task" [ 773.908501] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.918571] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5262713e-2f6c-9cef-d166-b1c344fd7b18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.961028] env[69994]: DEBUG nova.compute.manager [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 773.961028] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 773.961028] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f745fd-e062-43c0-b83c-b2d16d987919 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.970020] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 773.970840] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea377701-07a8-4ead-9eb5-4a31c856488c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.973250] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance f1f0d79f-dc67-4cf9-816c-c451f20d65ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 773.974787] env[69994]: INFO nova.compute.manager [-] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Took 1.32 seconds to deallocate network for instance. 
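The entries above repeat one pattern many times: a vCenter task is created (PowerOnVM_Task, PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, CreateVM_Task, SearchDatastore_Task, ...), then polled until it reports completion, which is why each "Waiting for the task" line is followed by "progress is 0%." and later by "completed successfully" with a duration. The sketch below is an illustrative, self-contained rendering of that polling loop only; the `Task` class, its `poll()` method, and `wait_for_task` here are hypothetical stand-ins and are not the oslo.vmware or nova implementation.

```python
# Illustrative sketch of the task-polling pattern seen in the log above.
# NOT the oslo.vmware API: Task and poll() are hypothetical stand-ins.
import time


class Task:
    """Hypothetical stand-in for a vCenter task handle."""

    def __init__(self, task_id, name, duration):
        self.task_id = task_id
        self.name = name
        self._deadline = time.monotonic() + duration

    def poll(self):
        """Return (state, progress); 'success' once the fake task is done."""
        if time.monotonic() >= self._deadline:
            return "success", 100
        return "running", 0


def wait_for_task(task, interval=0.5, timeout=60.0):
    """Poll `task` until it finishes, printing progress like the log entries."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        print(f"Task: {{'id': {task.task_id!r}, 'name': {task.name!r}}} "
              f"progress is {progress}%.")
        if state == "success":
            duration = round(time.monotonic() - start, 6)
            print(f"Task {task.task_id!r} ({task.name}) completed "
                  f"successfully in {duration}s.")
            return
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"{task.task_id} did not finish in {timeout}s")
        time.sleep(interval)


if __name__ == "__main__":
    # Example loosely mirroring task-2925335 (PowerOnVM_Task) from the log.
    wait_for_task(Task("task-2925335", "PowerOnVM_Task", duration=1.0))
```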
[ 774.046125] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 774.046370] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 774.046680] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Deleting the datastore file [datastore2] 6fb97a65-bf0b-4e79-9611-f0f3179661b5 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 774.046814] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aae0bdd8-6799-4e00-ae3d-43968d47eba3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.054177] env[69994]: DEBUG oslo_vmware.api [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 774.054177] env[69994]: value = "task-2925341" [ 774.054177] env[69994]: _type = "Task" [ 774.054177] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.069785] env[69994]: DEBUG oslo_vmware.api [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925341, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.181882] env[69994]: DEBUG oslo_concurrency.lockutils [req-dce1ded8-fcd5-4ca4-96ae-cc39d16ea04e req-b59fda0e-8e0a-45b0-9db7-92ad83e72b0a service nova] Releasing lock "refresh_cache-00ab07b7-e7ed-4a71-b684-d5af8b1b7616" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.421356] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5262713e-2f6c-9cef-d166-b1c344fd7b18, 'name': SearchDatastore_Task, 'duration_secs': 0.010308} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.421654] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.421937] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 00ab07b7-e7ed-4a71-b684-d5af8b1b7616/00ab07b7-e7ed-4a71-b684-d5af8b1b7616.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 774.422241] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ea4c510-719b-4097-84c4-acfa52ef0b91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.431345] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 774.431345] env[69994]: value = "task-2925342" [ 774.431345] env[69994]: _type = "Task" [ 774.431345] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.442228] env[69994]: DEBUG nova.compute.manager [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 774.444506] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925342, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.464766] env[69994]: DEBUG nova.virt.hardware [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 774.465205] env[69994]: DEBUG nova.virt.hardware [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 774.465266] env[69994]: DEBUG nova.virt.hardware [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 774.465433] env[69994]: DEBUG nova.virt.hardware [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 774.465585] env[69994]: DEBUG nova.virt.hardware [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 774.465739] env[69994]: DEBUG nova.virt.hardware [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 774.465968] env[69994]: DEBUG nova.virt.hardware [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 774.466134] env[69994]: DEBUG nova.virt.hardware [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 774.466307] env[69994]: DEBUG nova.virt.hardware [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e 
tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 774.466477] env[69994]: DEBUG nova.virt.hardware [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 774.466648] env[69994]: DEBUG nova.virt.hardware [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 774.467538] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebc930b-0a6e-40d0-a888-8e61008d3251 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.476845] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffc7bc3-b732-48be-9570-8b1289e4b5a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.481225] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 9a1343a8-11b4-4c9e-8445-931eab036a4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.483225] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.565768] env[69994]: DEBUG oslo_vmware.api [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925341, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164838} completed successfully. 
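
The nova.virt.hardware records above walk through CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits the maxima default to 65536 sockets/cores/threads, only one topology (1:1:1) is possible, and it is also the sorted preference. A simplified enumeration under those assumptions; this is an illustration of the idea, not the actual nova.virt.hardware code:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield (s, c, t)

    def sort_by_preference(topologies, prefer=(0, 0, 0)):
        """Order candidates by closeness to the preferred triple.

        Zeros mean "no preference", as in the log's 0:0:0 flavor/image prefs,
        so every candidate scores equally and the order is unchanged.
        """
        def score(topo):
            return sum(abs(p - v) for p, v in zip(prefer, topo) if p)
        return sorted(topologies, key=score)

    topos = list(possible_topologies(1))
    print(topos)                      # [(1, 1, 1)]  -> "Got 1 possible topologies"
    print(sort_by_preference(topos))  # [(1, 1, 1)]  -> "Sorted desired topologies"
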
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.566043] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 774.566259] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 774.566450] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 774.566620] env[69994]: INFO nova.compute.manager [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Took 0.61 seconds to destroy the instance on the hypervisor. [ 774.566852] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 774.567053] env[69994]: DEBUG nova.compute.manager [-] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 774.567150] env[69994]: DEBUG nova.network.neutron [-] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 774.570489] env[69994]: DEBUG nova.compute.manager [req-e5d3edbb-9ea1-49e5-8d7b-14972995b29d req-de99b0e5-7cb3-4477-8971-851901b47816 service nova] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Received event network-vif-deleted-92b4ab11-10bb-4f7a-8820-908a4fb30d9b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 774.941928] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925342, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502438} completed successfully. 
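
The teardown records above wait on a looping-call wrapper around network deallocation ("Waiting for function ... _deallocate_network_with_retries to return") before deallocate_for_instance() runs. A minimal retry-wrapper sketch of that shape; the real code uses oslo.service's looping call with backoff, and call_with_retries and deallocate_network here are hypothetical names:

    import time

    def call_with_retries(func, attempts=3, delay=1.0):
        """Run func(), retrying on failure a fixed number of times."""
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except Exception as exc:   # a real implementation narrows this
                if attempt == attempts:
                    raise
                print("deallocation failed (%s), retrying %d/%d" % (exc, attempt, attempts))
                time.sleep(delay)

    def deallocate_network():
        print("deallocate_for_instance()")   # the Neutron-side cleanup in the log
        return True

    call_with_retries(deallocate_network, delay=0.01)
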
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.942364] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 00ab07b7-e7ed-4a71-b684-d5af8b1b7616/00ab07b7-e7ed-4a71-b684-d5af8b1b7616.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 774.942455] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 774.942748] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d27c999-b372-43ad-9e15-0c9da6efbc3c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.949448] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 774.949448] env[69994]: value = "task-2925343" [ 774.949448] env[69994]: _type = "Task" [ 774.949448] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.957534] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925343, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.985429] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance c06a2540-e77d-48c0-967f-94e2a53c4d8f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
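
Once the cached image disk has been copied into the instance directory, the root disk is extended to 1048576 KB, which is simply the flavor's root_gb=1 expressed in KB. A small sketch of that conversion and the copy-then-extend ordering; copy_disk and extend_disk are hypothetical callables standing in for the real vCenter task invocations:

    def root_gb_to_kb(root_gb: int) -> int:
        """Flavor root_gb (GiB) -> capacity in KB, the unit the extend call takes."""
        return root_gb * 1024 * 1024

    def prepare_root_disk(copy_disk, extend_disk, cache_path, instance_path, root_gb):
        # 1. Copy the cached, sparse image disk into the instance directory.
        copy_disk(cache_path, instance_path)
        # 2. Grow the copy to the flavor's root disk size.
        extend_disk(instance_path, root_gb_to_kb(root_gb))

    assert root_gb_to_kb(1) == 1048576  # matches "Extending root virtual disk to 1048576"

    prepare_root_disk(
        copy_disk=lambda src, dst: print("copy", src, "->", dst),
        extend_disk=lambda path, kb: print("extend", path, "to", kb, "KB"),
        cache_path="[datastore1] cache/image.vmdk",      # placeholder paths
        instance_path="[datastore1] instance/root.vmdk",
        root_gb=1,
    )
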
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 775.161874] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "bb062ddc-5281-4957-bb9d-8f5c0b0ba526" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.162142] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "bb062ddc-5281-4957-bb9d-8f5c0b0ba526" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.404503] env[69994]: DEBUG nova.network.neutron [-] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.460285] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925343, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071409} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.460576] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 775.461394] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3480e8bc-afe4-4211-985a-3b1ab8ec85fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.486615] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 00ab07b7-e7ed-4a71-b684-d5af8b1b7616/00ab07b7-e7ed-4a71-b684-d5af8b1b7616.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 775.487514] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da305709-d3d9-4396-99b0-263e9b353c38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.510150] env[69994]: DEBUG nova.network.neutron [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Successfully updated port: 522a6fbf-6853-4dc5-ba10-82f41421ee4a {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 775.512190] env[69994]: DEBUG nova.compute.resource_tracker 
[None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 775.520431] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 775.520431] env[69994]: value = "task-2925344" [ 775.520431] env[69994]: _type = "Task" [ 775.520431] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.531346] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925344, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.907258] env[69994]: INFO nova.compute.manager [-] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Took 1.34 seconds to deallocate network for instance. [ 776.013859] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Acquiring lock "refresh_cache-03a10403-0253-4df0-84b2-1e56f0c057fe" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.014118] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Acquired lock "refresh_cache-03a10403-0253-4df0-84b2-1e56f0c057fe" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.014186] env[69994]: DEBUG nova.network.neutron [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 776.015664] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 9269e42b-b05c-4c88-9008-aaeda4b0248f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 776.031736] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925344, 'name': ReconfigVM_Task, 'duration_secs': 0.287928} completed successfully. 
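
The "Acquiring lock ... / Lock ... acquired :: waited 0.000s" records above serialize work on string-named locks such as "refresh_cache-<instance uuid>" and "compute_resources". A stdlib-only sketch of that named-lock pattern, purely illustrative and not the oslo.concurrency lockutils implementation the log comes from:

    import threading, time
    from collections import defaultdict
    from contextlib import contextmanager

    _locks = defaultdict(threading.Lock)   # one lock object per lock name
    _guard = threading.Lock()              # protects the registry itself

    @contextmanager
    def named_lock(name):
        with _guard:
            lock = _locks[name]
        start = time.monotonic()
        lock.acquire()
        try:
            waited = time.monotonic() - start
            print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
            yield
        finally:
            lock.release()
            print('Lock "%s" released' % name)

    # Serializes all work on one instance's network-info cache, as in the log.
    with named_lock("refresh_cache-03a10403-0253-4df0-84b2-1e56f0c057fe"):
        pass  # rebuild the cache here
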
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.031998] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 00ab07b7-e7ed-4a71-b684-d5af8b1b7616/00ab07b7-e7ed-4a71-b684-d5af8b1b7616.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 776.032606] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8b7e241-7645-4e23-8e23-9671cb4f219e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.039988] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 776.039988] env[69994]: value = "task-2925345" [ 776.039988] env[69994]: _type = "Task" [ 776.039988] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.048651] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925345, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.413737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.520265] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 3c814c83-20cc-4871-9f30-5c0c7d99b8a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 776.546580] env[69994]: DEBUG nova.network.neutron [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.551714] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925345, 'name': Rename_Task, 'duration_secs': 0.15401} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.551964] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 776.552215] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-775d9389-6eb3-4f9a-8a31-02092158ba9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.559992] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 776.559992] env[69994]: value = "task-2925346" [ 776.559992] env[69994]: _type = "Task" [ 776.559992] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.568896] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925346, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.667328] env[69994]: DEBUG nova.compute.manager [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Received event network-vif-deleted-2523e77a-138d-4684-9f91-02c4e2e85f27 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 776.667538] env[69994]: DEBUG nova.compute.manager [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Received event network-vif-plugged-522a6fbf-6853-4dc5-ba10-82f41421ee4a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 776.667768] env[69994]: DEBUG oslo_concurrency.lockutils [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] Acquiring lock "03a10403-0253-4df0-84b2-1e56f0c057fe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.668022] env[69994]: DEBUG oslo_concurrency.lockutils [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] Lock "03a10403-0253-4df0-84b2-1e56f0c057fe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.668829] env[69994]: DEBUG oslo_concurrency.lockutils [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] Lock "03a10403-0253-4df0-84b2-1e56f0c057fe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.669128] env[69994]: DEBUG nova.compute.manager [req-8c1d5236-a570-4091-9734-d3c231cb1498 
req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] No waiting events found dispatching network-vif-plugged-522a6fbf-6853-4dc5-ba10-82f41421ee4a {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 776.669383] env[69994]: WARNING nova.compute.manager [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Received unexpected event network-vif-plugged-522a6fbf-6853-4dc5-ba10-82f41421ee4a for instance with vm_state building and task_state spawning. [ 776.669683] env[69994]: DEBUG nova.compute.manager [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Received event network-changed-522a6fbf-6853-4dc5-ba10-82f41421ee4a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 776.669846] env[69994]: DEBUG nova.compute.manager [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Refreshing instance network info cache due to event network-changed-522a6fbf-6853-4dc5-ba10-82f41421ee4a. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 776.670089] env[69994]: DEBUG oslo_concurrency.lockutils [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] Acquiring lock "refresh_cache-03a10403-0253-4df0-84b2-1e56f0c057fe" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.711710] env[69994]: DEBUG nova.network.neutron [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Updating instance_info_cache with network_info: [{"id": "522a6fbf-6853-4dc5-ba10-82f41421ee4a", "address": "fa:16:3e:67:db:8e", "network": {"id": "8e6b1f4f-ae98-4c54-be06-d2a3631344d6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-946732901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f936106f0d2844d8844bea3f589aaab9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f40f5c4-c146-449c-884d-6f884dcf2acf", "external-id": "nsx-vlan-transportzone-240", "segmentation_id": 240, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap522a6fbf-68", "ovs_interfaceid": "522a6fbf-6853-4dc5-ba10-82f41421ee4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.022889] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 956306bc-4701-4c04-8221-8ec0b9df73ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to 
start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 777.071556] env[69994]: DEBUG oslo_vmware.api [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925346, 'name': PowerOnVM_Task, 'duration_secs': 0.494733} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.071556] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 777.071692] env[69994]: INFO nova.compute.manager [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Took 7.22 seconds to spawn the instance on the hypervisor. [ 777.071796] env[69994]: DEBUG nova.compute.manager [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 777.074113] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b0aa1f-7148-4be8-a84e-c077904da5ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.214740] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Releasing lock "refresh_cache-03a10403-0253-4df0-84b2-1e56f0c057fe" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.214740] env[69994]: DEBUG nova.compute.manager [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Instance network_info: |[{"id": "522a6fbf-6853-4dc5-ba10-82f41421ee4a", "address": "fa:16:3e:67:db:8e", "network": {"id": "8e6b1f4f-ae98-4c54-be06-d2a3631344d6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-946732901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f936106f0d2844d8844bea3f589aaab9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f40f5c4-c146-449c-884d-6f884dcf2acf", "external-id": "nsx-vlan-transportzone-240", "segmentation_id": 240, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap522a6fbf-68", "ovs_interfaceid": "522a6fbf-6853-4dc5-ba10-82f41421ee4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 777.215226] env[69994]: DEBUG oslo_concurrency.lockutils [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] Acquired lock "refresh_cache-03a10403-0253-4df0-84b2-1e56f0c057fe" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.215226] env[69994]: DEBUG nova.network.neutron [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Refreshing network info cache for port 522a6fbf-6853-4dc5-ba10-82f41421ee4a {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 777.216276] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:db:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f40f5c4-c146-449c-884d-6f884dcf2acf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '522a6fbf-6853-4dc5-ba10-82f41421ee4a', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.229780] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Creating folder: Project (f936106f0d2844d8844bea3f589aaab9). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 777.232873] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e9355ad1-5d4a-4ed4-86e4-fd5ea3e8e14b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.245716] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Created folder: Project (f936106f0d2844d8844bea3f589aaab9) in parent group-v587342. [ 777.245923] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Creating folder: Instances. Parent ref: group-v587457. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 777.246165] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67286b39-ddec-43af-9afa-2112e9a0c62c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.257151] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Created folder: Instances in parent group-v587457. [ 777.257413] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 777.257608] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 777.257820] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e250ca59-8592-40f8-92fb-b64dc74fdf08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.283995] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.283995] env[69994]: value = "task-2925349" [ 777.283995] env[69994]: _type = "Task" [ 777.283995] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.292164] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925349, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.455274] env[69994]: DEBUG nova.network.neutron [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Updated VIF entry in instance network info cache for port 522a6fbf-6853-4dc5-ba10-82f41421ee4a. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 777.455785] env[69994]: DEBUG nova.network.neutron [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Updating instance_info_cache with network_info: [{"id": "522a6fbf-6853-4dc5-ba10-82f41421ee4a", "address": "fa:16:3e:67:db:8e", "network": {"id": "8e6b1f4f-ae98-4c54-be06-d2a3631344d6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-946732901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f936106f0d2844d8844bea3f589aaab9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f40f5c4-c146-449c-884d-6f884dcf2acf", "external-id": "nsx-vlan-transportzone-240", "segmentation_id": 240, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap522a6fbf-68", "ovs_interfaceid": "522a6fbf-6853-4dc5-ba10-82f41421ee4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.526373] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 777.592020] env[69994]: INFO nova.compute.manager [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Took 44.25 seconds to build instance. [ 777.598255] env[69994]: DEBUG oslo_concurrency.lockutils [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.598448] env[69994]: DEBUG oslo_concurrency.lockutils [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.598687] env[69994]: DEBUG oslo_concurrency.lockutils [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.598881] env[69994]: DEBUG oslo_concurrency.lockutils [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.599062] env[69994]: DEBUG oslo_concurrency.lockutils [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.600722] env[69994]: INFO nova.compute.manager [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Terminating instance [ 777.795385] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925349, 'name': CreateVM_Task, 'duration_secs': 0.294415} completed successfully. 
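
The "<uuid>-events" locks here and the earlier "No waiting events found dispatching network-vif-plugged-..." warning come from the same mechanism: the compute manager keeps a per-instance registry of external events it expects, pops an entry when Neutron delivers one, and clears the whole registry at teardown. A toy version of that registry, illustrative only and not the nova InstanceEvents class itself:

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Per-instance registry of events a build is waiting on (sketch)."""

        def __init__(self):
            self._events = defaultdict(dict)   # instance uuid -> {event name: Event}
            self._lock = threading.Lock()      # plays the role of the "<uuid>-events" lock

        def prepare_for_event(self, instance_uuid, name):
            waiter = threading.Event()
            with self._lock:
                self._events[instance_uuid][name] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, name):
            with self._lock:
                waiter = self._events[instance_uuid].pop(name, None)
            if waiter is None:
                # Same situation as the log's "unexpected event" warning.
                print("No waiting events found dispatching %s" % name)
                return False
            waiter.set()
            return True

        def clear_events_for_instance(self, instance_uuid):
            with self._lock:
                self._events.pop(instance_uuid, None)

    events = InstanceEvents()
    events.pop_instance_event("03a10403", "network-vif-plugged-522a6fbf")
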
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.795596] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 777.796266] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.796446] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.796766] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 777.797039] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14aa0f1e-9df3-43f9-9fc0-180095171046 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.802371] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Waiting for the task: (returnval){ [ 777.802371] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cba61f-0fd5-2236-a1f5-6ab65fb874ce" [ 777.802371] env[69994]: _type = "Task" [ 777.802371] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.810966] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cba61f-0fd5-2236-a1f5-6ab65fb874ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.959448] env[69994]: DEBUG oslo_concurrency.lockutils [req-8c1d5236-a570-4091-9734-d3c231cb1498 req-69330bbe-d0a7-41bf-8486-5ba3837301c8 service nova] Releasing lock "refresh_cache-03a10403-0253-4df0-84b2-1e56f0c057fe" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.029929] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance f6408fad-a6b8-4868-a192-3acd065935ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.094135] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d4be778-0a6c-4166-b76a-3ebcce6e5a06 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "00ab07b7-e7ed-4a71-b684-d5af8b1b7616" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.946s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.104630] env[69994]: DEBUG nova.compute.manager [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 778.104917] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 778.106021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70389dfb-e9b9-4a27-bf9c-4a08298f6f41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.118818] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 778.119197] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a426bbb5-7385-4aac-b41a-82cab7a775e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.128215] env[69994]: DEBUG oslo_vmware.api [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 778.128215] env[69994]: value = "task-2925350" [ 778.128215] env[69994]: _type = "Task" [ 778.128215] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.136564] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "234c2683-80f3-4f29-bcc9-9853338128bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.137204] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "234c2683-80f3-4f29-bcc9-9853338128bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.142096] env[69994]: DEBUG oslo_vmware.api [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.315099] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cba61f-0fd5-2236-a1f5-6ab65fb874ce, 'name': SearchDatastore_Task, 'duration_secs': 0.013482} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.315345] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.315589] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 778.315827] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.315969] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.316164] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 778.316462] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5287b06-ef35-4a78-8826-30b0d77c2c0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.327778] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 778.327991] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Folder [datastore1] devstack-image-cache_base created. 
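
The directory records above create the devstack-image-cache_base folder even though it may already exist on the datastore; the operation is treated as idempotent. A local-filesystem analogue of that create-if-missing step, a sketch only since the real driver issues a datastore MakeDirectory call and tolerates a FileAlreadyExists fault:

    import os

    def create_folder_if_missing(path):
        """Create a cache folder, treating "already exists" as success."""
        os.makedirs(path, exist_ok=True)
        print("Folder %s created (or already present)" % path)

    create_folder_if_missing("/tmp/devstack-image-cache_base")
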
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 778.328756] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7665268d-0c5e-4cd4-8369-676a8090aae4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.335900] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Waiting for the task: (returnval){ [ 778.335900] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52beee9c-427d-9ae0-d610-7e80590fba8b" [ 778.335900] env[69994]: _type = "Task" [ 778.335900] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.346912] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52beee9c-427d-9ae0-d610-7e80590fba8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.534071] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 203bc0d6-c149-4c3d-9ac7-962210d6b01d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.597500] env[69994]: DEBUG nova.compute.manager [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 778.639738] env[69994]: DEBUG oslo_vmware.api [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925350, 'name': PowerOffVM_Task, 'duration_secs': 0.241792} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.639952] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 778.640139] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 778.640407] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ef55bad-6ae6-441b-8aed-adafda7cc692 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.670510] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7609d924-2088-4f81-9496-19bf8210ba1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.677112] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-03a3a59d-9ff1-4e4d-938d-e8c02d020500 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Suspending the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 778.677367] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d38e0c1d-0d49-454a-8b57-01ddcc8d1516 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.683460] env[69994]: DEBUG oslo_vmware.api [None req-03a3a59d-9ff1-4e4d-938d-e8c02d020500 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 778.683460] env[69994]: value = "task-2925352" [ 778.683460] env[69994]: _type = "Task" [ 778.683460] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.692715] env[69994]: DEBUG oslo_vmware.api [None req-03a3a59d-9ff1-4e4d-938d-e8c02d020500 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925352, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.714094] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 778.714303] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 778.714456] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Deleting the datastore file [datastore2] 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 778.714776] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e08c2ac-8b8d-4190-81cb-e6f67eb19feb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.723789] env[69994]: DEBUG oslo_vmware.api [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 778.723789] env[69994]: value = "task-2925353" [ 778.723789] env[69994]: _type = "Task" [ 778.723789] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.733193] env[69994]: DEBUG oslo_vmware.api [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925353, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.847803] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52beee9c-427d-9ae0-d610-7e80590fba8b, 'name': SearchDatastore_Task, 'duration_secs': 0.014428} completed successfully. 
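
By this point the log has shown the full teardown order for instance 8dcd6786: power the VM off, unregister it from the vCenter inventory, then delete its folder contents from the datastore before the network is deallocated. A sketch of that sequence over a hypothetical driver object; the method names here are illustrative and a real implementation wraps each step in error handling:

    def destroy_instance(driver, instance_ref, datastore_path):
        """Teardown order seen in the log: power off, unregister, delete files."""
        driver.power_off(instance_ref)        # PowerOffVM_Task
        driver.unregister(instance_ref)       # UnregisterVM (VM leaves the inventory)
        driver.delete_path(datastore_path)    # DeleteDatastoreFile_Task on the VM folder

    class FakeDriver:
        def power_off(self, ref): print("Powered off the VM")
        def unregister(self, ref): print("Unregistered the VM")
        def delete_path(self, p): print("Deleted the datastore file", p)

    destroy_instance(FakeDriver(), "vm-8dcd6786",
                     "[datastore2] 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6")
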
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.848800] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-347215a3-7067-4b44-b579-93ebdb4e439b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.855703] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Waiting for the task: (returnval){ [ 778.855703] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52584e23-0da1-9d87-a95a-9491352c070f" [ 778.855703] env[69994]: _type = "Task" [ 778.855703] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.866936] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52584e23-0da1-9d87-a95a-9491352c070f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.036927] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 67f5ad56-9455-43fc-b940-8a67974703cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 779.037296] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 779.037461] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4096MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 779.119501] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.198852] env[69994]: DEBUG oslo_vmware.api [None req-03a3a59d-9ff1-4e4d-938d-e8c02d020500 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925352, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.237193] env[69994]: DEBUG oslo_vmware.api [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157406} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.237468] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 779.237756] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 779.237903] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 779.238117] env[69994]: INFO nova.compute.manager [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 779.238325] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 779.238516] env[69994]: DEBUG nova.compute.manager [-] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 779.238617] env[69994]: DEBUG nova.network.neutron [-] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 779.370249] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52584e23-0da1-9d87-a95a-9491352c070f, 'name': SearchDatastore_Task, 'duration_secs': 0.028543} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.370430] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.372238] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 03a10403-0253-4df0-84b2-1e56f0c057fe/03a10403-0253-4df0-84b2-1e56f0c057fe.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 779.372238] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f682372f-b794-43e6-89f3-829f4dc38c81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.380128] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Waiting for the task: (returnval){ [ 779.380128] env[69994]: value = "task-2925354" [ 779.380128] env[69994]: _type = "Task" [ 779.380128] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.389536] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925354, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.502196] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad9ac98-4299-4af0-8c01-097c625c401e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.511563] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2df1f07-97d3-495d-8269-5fd5e3ce3bd7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.546242] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab12c09-991c-4f5a-b7db-b25bc15237eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.555736] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5233158e-9310-47f1-840d-51b5dc22e86b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.573504] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 779.696617] env[69994]: DEBUG oslo_vmware.api [None req-03a3a59d-9ff1-4e4d-938d-e8c02d020500 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925352, 'name': SuspendVM_Task, 'duration_secs': 0.82529} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.696953] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-03a3a59d-9ff1-4e4d-938d-e8c02d020500 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Suspended the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 779.697112] env[69994]: DEBUG nova.compute.manager [None req-03a3a59d-9ff1-4e4d-938d-e8c02d020500 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 779.698080] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc4cee3-32ad-4bc6-873a-65d2cfe8d97a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.891899] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925354, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50569} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.892698] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 03a10403-0253-4df0-84b2-1e56f0c057fe/03a10403-0253-4df0-84b2-1e56f0c057fe.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 779.892698] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 779.892698] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-860695c5-1846-4817-820f-95fa69a24231 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.901324] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Waiting for the task: (returnval){ [ 779.901324] env[69994]: value = "task-2925355" [ 779.901324] env[69994]: _type = "Task" [ 779.901324] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.911280] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925355, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.922031] env[69994]: DEBUG nova.compute.manager [req-c4bd43b7-581d-4f8e-a9b4-bcbadf207b83 req-d8c72bbb-5604-4ec5-bb87-f0675ad291ea service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Received event network-vif-deleted-1acb2297-91d5-4675-bbec-1c950d6cd544 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 779.922132] env[69994]: INFO nova.compute.manager [req-c4bd43b7-581d-4f8e-a9b4-bcbadf207b83 req-d8c72bbb-5604-4ec5-bb87-f0675ad291ea service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Neutron deleted interface 1acb2297-91d5-4675-bbec-1c950d6cd544; detaching it from the instance and deleting it from the info cache [ 779.922238] env[69994]: DEBUG nova.network.neutron [req-c4bd43b7-581d-4f8e-a9b4-bcbadf207b83 req-d8c72bbb-5604-4ec5-bb87-f0675ad291ea service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.097718] env[69994]: ERROR nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [req-1fed3fc2-79de-4f0a-8095-5af53cf955fd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1fed3fc2-79de-4f0a-8095-5af53cf955fd"}]} [ 780.117471] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 780.134350] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 780.134547] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 780.148582] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 780.184495] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 780.402750] env[69994]: DEBUG nova.network.neutron [-] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.418789] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925355, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066997} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.419218] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 780.420064] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7552d9ad-f87b-471e-b8e4-5d876ceb8024 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.436810] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1f45005-5149-488a-b205-2a63a697e405 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.448211] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 03a10403-0253-4df0-84b2-1e56f0c057fe/03a10403-0253-4df0-84b2-1e56f0c057fe.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 780.452291] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c660607b-b6bb-4942-90ec-91365cdaa73b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.478742] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ad43f8-424d-4039-8205-4c6a7c301a7e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.493814] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e 
tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Waiting for the task: (returnval){ [ 780.493814] env[69994]: value = "task-2925356" [ 780.493814] env[69994]: _type = "Task" [ 780.493814] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.504170] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925356, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.521836] env[69994]: DEBUG nova.compute.manager [req-c4bd43b7-581d-4f8e-a9b4-bcbadf207b83 req-d8c72bbb-5604-4ec5-bb87-f0675ad291ea service nova] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Detach interface failed, port_id=1acb2297-91d5-4675-bbec-1c950d6cd544, reason: Instance 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 780.672435] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72064125-0123-4a32-a354-ca6dc9fc3a68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.680768] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81004cf-5234-4e4b-a3e4-07ffa89e2fe9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.718246] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143f580a-db93-4b0f-a595-af8981782a68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.727187] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34325b36-b0c7-4dd1-a342-d4f5f7f79e76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.742714] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 780.909976] env[69994]: INFO nova.compute.manager [-] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Took 1.67 seconds to deallocate network for instance. [ 781.005831] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925356, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.277977] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 63 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 781.278325] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 63 to 64 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 781.278435] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 781.417369] env[69994]: DEBUG oslo_concurrency.lockutils [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.506059] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925356, 'name': ReconfigVM_Task, 'duration_secs': 0.825481} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.506369] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 03a10403-0253-4df0-84b2-1e56f0c057fe/03a10403-0253-4df0-84b2-1e56f0c057fe.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 781.506987] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0e4b770-e6c9-4481-83be-8a078fca22ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.515207] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Waiting for the task: (returnval){ [ 781.515207] env[69994]: value = "task-2925357" [ 781.515207] env[69994]: _type = "Task" [ 781.515207] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.523820] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925357, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.783285] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 781.783584] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.372s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.783858] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.445s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.785485] env[69994]: INFO nova.compute.claims [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 781.977738] env[69994]: DEBUG nova.compute.manager [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.978717] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-16191328-63e7-42d1-b63f-878ab2e43f7f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.028628] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925357, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.490104] env[69994]: INFO nova.compute.manager [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] instance snapshotting [ 782.490418] env[69994]: WARNING nova.compute.manager [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 782.493198] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f51f5dc-b712-4e63-9b41-73c717563ad1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.513475] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfccc67-68c8-4eec-adbf-bf6286b5850a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.528977] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925357, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.025827] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 783.026147] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-202f2341-ba55-46f7-8499-3e6bcbf4d241 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.032246] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925357, 'name': Rename_Task, 'duration_secs': 1.162417} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.033059] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 783.033284] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ffb2cb6-cefa-4c45-ba34-81e99484d243 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.038903] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 783.038903] env[69994]: value = "task-2925358" [ 783.038903] env[69994]: _type = "Task" [ 783.038903] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.045545] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Waiting for the task: (returnval){ [ 783.045545] env[69994]: value = "task-2925359" [ 783.045545] env[69994]: _type = "Task" [ 783.045545] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.051823] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925358, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.061722] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925359, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.188790] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52abfdb3-ff84-44f0-8da9-9a2c1368891e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.197060] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9723c6-72dd-4b0d-baf3-61b7e292d694 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.232443] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de63866f-1784-4655-95de-d2fefa023fcf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.240896] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae208f9-7bb6-4c97-a697-f523f3f2febf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.255057] env[69994]: DEBUG nova.compute.provider_tree [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.552249] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925358, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.557428] env[69994]: DEBUG oslo_vmware.api [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925359, 'name': PowerOnVM_Task, 'duration_secs': 0.507213} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.557706] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 783.557911] env[69994]: INFO nova.compute.manager [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Took 9.12 seconds to spawn the instance on the hypervisor. 
[ 783.558098] env[69994]: DEBUG nova.compute.manager [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 783.558847] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67cea88-8b36-42e3-bff4-939bcb84b911 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.757818] env[69994]: DEBUG nova.scheduler.client.report [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 784.053737] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925358, 'name': CreateSnapshot_Task, 'duration_secs': 0.596544} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.053985] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 784.054750] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a362fd89-a11b-4efa-9b50-65dff97bb428 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.075021] env[69994]: INFO nova.compute.manager [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Took 47.29 seconds to build instance. [ 784.262827] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.479s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.263417] env[69994]: DEBUG nova.compute.manager [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 784.267233] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 41.538s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.267489] env[69994]: DEBUG nova.objects.instance [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 784.575628] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 784.575954] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7a3430d1-cffe-4065-9bbe-f458539a4d0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.579429] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bcc27e4a-1709-45b0-a8e3-752b398e2f1e tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Lock "03a10403-0253-4df0-84b2-1e56f0c057fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 86.654s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.587536] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 784.587536] env[69994]: value = "task-2925360" [ 784.587536] env[69994]: _type = "Task" [ 784.587536] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.597705] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925360, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.773130] env[69994]: DEBUG nova.compute.utils [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 784.780059] env[69994]: DEBUG nova.compute.manager [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 784.780059] env[69994]: DEBUG nova.network.neutron [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 784.827044] env[69994]: DEBUG nova.policy [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c8a1a049c7047328246229680c0bf24', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '069aad9dee5a434383cecdd983f451b8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 785.081911] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 785.098441] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925360, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.138185] env[69994]: DEBUG nova.network.neutron [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Successfully created port: ecb02147-aeb7-4256-9ce1-e20d727853b4 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.280959] env[69994]: DEBUG nova.compute.manager [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 785.284583] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78b294f8-eaec-4412-bc0f-0239e77592a2 tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.286092] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.599s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.286290] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.288575] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.227s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.288878] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.290617] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.050s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.292102] env[69994]: INFO nova.compute.claims [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 785.325454] env[69994]: INFO nova.scheduler.client.report [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Deleted allocations for instance 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d [ 785.327493] env[69994]: INFO nova.scheduler.client.report [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Deleted allocations for instance 1693ccdf-ea72-45d5-8b34-e2b0e155e528 [ 785.616934] 
env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925360, 'name': CloneVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.624019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.788101] env[69994]: INFO nova.virt.block_device [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Booting with volume e1cb381a-6162-44bf-a51f-61502bd6cb59 at /dev/sda [ 785.838157] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2bf49990-6301-4768-b27d-b6f10d50530c tempest-DeleteServersAdminTestJSON-328123481 tempest-DeleteServersAdminTestJSON-328123481-project-member] Lock "2ee43622-74f3-4bf6-88e3-cba4ff7ce33d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 45.741s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.839234] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f122903-4b7b-41f2-8183-9a71d402aeaa tempest-ServerShowV254Test-385686127 tempest-ServerShowV254Test-385686127-project-member] Lock "1693ccdf-ea72-45d5-8b34-e2b0e155e528" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 41.983s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.843259] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-755ee959-c11e-4c82-8846-6835de0a9fbf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.859536] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75987b7-fc9a-4193-a7db-909c5652425c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.894354] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dee23ad7-4d19-45ea-8e2e-1badb54b88c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.903480] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beac14e6-f6ee-47b4-9526-11c131019edf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.939825] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a8844a-aa4e-4a73-ac51-dc71b4473504 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.949115] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f7f9246-4321-440e-8b82-ac103bcc4cbf {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.964819] env[69994]: DEBUG nova.virt.block_device [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Updating existing volume attachment record: 149170e1-2aa1-4afd-be3f-ccd9b56a035d {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 786.105881] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925360, 'name': CloneVM_Task, 'duration_secs': 1.367248} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.106261] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Created linked-clone VM from snapshot [ 786.107684] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340de75a-a7c3-44b1-96bb-586a1f40ceea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.110229] env[69994]: DEBUG oslo_concurrency.lockutils [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Acquiring lock "03a10403-0253-4df0-84b2-1e56f0c057fe" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.110525] env[69994]: DEBUG oslo_concurrency.lockutils [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Lock "03a10403-0253-4df0-84b2-1e56f0c057fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.111083] env[69994]: DEBUG oslo_concurrency.lockutils [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Acquiring lock "03a10403-0253-4df0-84b2-1e56f0c057fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.111083] env[69994]: DEBUG oslo_concurrency.lockutils [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Lock "03a10403-0253-4df0-84b2-1e56f0c057fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.111260] env[69994]: DEBUG oslo_concurrency.lockutils [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Lock "03a10403-0253-4df0-84b2-1e56f0c057fe-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.113373] env[69994]: INFO nova.compute.manager [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Terminating instance [ 786.119163] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Uploading image e74b58ec-98de-4cab-ac70-0dc844126ba5 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 786.147315] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 786.147315] env[69994]: value = "vm-587461" [ 786.147315] env[69994]: _type = "VirtualMachine" [ 786.147315] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 786.147636] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-873884c4-29f3-4569-99e7-ae90c77412f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.156231] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lease: (returnval){ [ 786.156231] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529baec2-714c-1a20-6b75-36f4899d6a44" [ 786.156231] env[69994]: _type = "HttpNfcLease" [ 786.156231] env[69994]: } obtained for exporting VM: (result){ [ 786.156231] env[69994]: value = "vm-587461" [ 786.156231] env[69994]: _type = "VirtualMachine" [ 786.156231] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 786.156937] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the lease: (returnval){ [ 786.156937] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529baec2-714c-1a20-6b75-36f4899d6a44" [ 786.156937] env[69994]: _type = "HttpNfcLease" [ 786.156937] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 786.168269] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 786.168269] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529baec2-714c-1a20-6b75-36f4899d6a44" [ 786.168269] env[69994]: _type = "HttpNfcLease" [ 786.168269] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 786.622745] env[69994]: DEBUG nova.compute.manager [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 786.623046] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 786.623933] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c112167d-0446-4c3e-b373-a6eafb5e9a58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.634950] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 786.635242] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc660c79-cb53-4a2c-8003-e7568bcc0ab6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.644845] env[69994]: DEBUG oslo_vmware.api [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Waiting for the task: (returnval){ [ 786.644845] env[69994]: value = "task-2925362" [ 786.644845] env[69994]: _type = "Task" [ 786.644845] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.654256] env[69994]: DEBUG oslo_vmware.api [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925362, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.668378] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 786.668378] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529baec2-714c-1a20-6b75-36f4899d6a44" [ 786.668378] env[69994]: _type = "HttpNfcLease" [ 786.668378] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 786.668667] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 786.668667] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529baec2-714c-1a20-6b75-36f4899d6a44" [ 786.668667] env[69994]: _type = "HttpNfcLease" [ 786.668667] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 786.669646] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5efcf29f-da60-4d76-b13e-dc00edd36175 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.677923] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b5f7b4-1679-1521-5216-5cce27b539b9/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 786.678135] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b5f7b4-1679-1521-5216-5cce27b539b9/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 786.790360] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-648717e8-41c0-451b-a0cd-c902a090aa43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.828982] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "interface-e46b8a11-650a-4e34-bc4a-e1c1b2515e76-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.829147] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-e46b8a11-650a-4e34-bc4a-e1c1b2515e76-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.833632] env[69994]: DEBUG nova.objects.instance [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'flavor' on Instance uuid e46b8a11-650a-4e34-bc4a-e1c1b2515e76 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 786.845100] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fb6cdd-86fb-4863-a484-17c15c5439d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.856975] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e208ecfb-1298-4725-b272-d2f895cab0b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.894395] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd98d036-292c-4a87-a313-e7a5e488fbd3 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.902661] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5325f34-dd2d-44d7-a0aa-1b315f8eddbe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.917672] env[69994]: DEBUG nova.compute.provider_tree [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.013540] env[69994]: DEBUG nova.network.neutron [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Successfully updated port: ecb02147-aeb7-4256-9ce1-e20d727853b4 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 787.071350] env[69994]: DEBUG nova.compute.manager [req-c00e7219-6cea-43b3-aa5f-5e79bbe539cd req-7c01d4dc-5a8b-4745-94c5-2eace1182239 service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Received event network-vif-plugged-ecb02147-aeb7-4256-9ce1-e20d727853b4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 787.071350] env[69994]: DEBUG oslo_concurrency.lockutils [req-c00e7219-6cea-43b3-aa5f-5e79bbe539cd req-7c01d4dc-5a8b-4745-94c5-2eace1182239 service nova] Acquiring lock "f1f0d79f-dc67-4cf9-816c-c451f20d65ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.071350] env[69994]: DEBUG oslo_concurrency.lockutils [req-c00e7219-6cea-43b3-aa5f-5e79bbe539cd req-7c01d4dc-5a8b-4745-94c5-2eace1182239 service nova] Lock "f1f0d79f-dc67-4cf9-816c-c451f20d65ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.071350] env[69994]: DEBUG oslo_concurrency.lockutils [req-c00e7219-6cea-43b3-aa5f-5e79bbe539cd req-7c01d4dc-5a8b-4745-94c5-2eace1182239 service nova] Lock "f1f0d79f-dc67-4cf9-816c-c451f20d65ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.071350] env[69994]: DEBUG nova.compute.manager [req-c00e7219-6cea-43b3-aa5f-5e79bbe539cd req-7c01d4dc-5a8b-4745-94c5-2eace1182239 service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] No waiting events found dispatching network-vif-plugged-ecb02147-aeb7-4256-9ce1-e20d727853b4 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 787.071350] env[69994]: WARNING nova.compute.manager [req-c00e7219-6cea-43b3-aa5f-5e79bbe539cd req-7c01d4dc-5a8b-4745-94c5-2eace1182239 service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Received unexpected event network-vif-plugged-ecb02147-aeb7-4256-9ce1-e20d727853b4 for instance with vm_state building and task_state block_device_mapping. 
[ 787.155713] env[69994]: DEBUG oslo_vmware.api [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925362, 'name': PowerOffVM_Task, 'duration_secs': 0.214045} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.156081] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 787.156360] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 787.156701] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e8a7338-3435-44c0-b456-55f423f8791e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.244217] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 787.244217] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 787.244457] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Deleting the datastore file [datastore1] 03a10403-0253-4df0-84b2-1e56f0c057fe {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 787.244813] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1cb4c87-592e-4a90-84ad-fad5e19f4e17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.252129] env[69994]: DEBUG oslo_vmware.api [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Waiting for the task: (returnval){ [ 787.252129] env[69994]: value = "task-2925364" [ 787.252129] env[69994]: _type = "Task" [ 787.252129] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.261657] env[69994]: DEBUG oslo_vmware.api [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925364, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.422768] env[69994]: DEBUG nova.scheduler.client.report [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 787.516211] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Acquiring lock "refresh_cache-f1f0d79f-dc67-4cf9-816c-c451f20d65ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.516512] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Acquired lock "refresh_cache-f1f0d79f-dc67-4cf9-816c-c451f20d65ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.516596] env[69994]: DEBUG nova.network.neutron [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.600089] env[69994]: DEBUG nova.objects.instance [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'pci_requests' on Instance uuid e46b8a11-650a-4e34-bc4a-e1c1b2515e76 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 787.765235] env[69994]: DEBUG oslo_vmware.api [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Task: {'id': task-2925364, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201756} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.765784] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 787.765844] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 787.766129] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 787.766379] env[69994]: INFO nova.compute.manager [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Took 1.14 seconds to destroy the instance on the hypervisor. [ 787.766681] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 787.766951] env[69994]: DEBUG nova.compute.manager [-] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 787.767516] env[69994]: DEBUG nova.network.neutron [-] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 787.929324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.638s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.929990] env[69994]: DEBUG nova.compute.manager [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 787.933129] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.323s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.934818] env[69994]: INFO nova.compute.claims [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 788.061116] env[69994]: DEBUG nova.network.neutron [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.082257] env[69994]: DEBUG nova.compute.manager [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 788.082257] env[69994]: DEBUG nova.virt.hardware [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 788.082257] env[69994]: DEBUG nova.virt.hardware [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 788.082257] env[69994]: DEBUG nova.virt.hardware [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 788.082257] env[69994]: DEBUG nova.virt.hardware [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 788.082257] env[69994]: DEBUG nova.virt.hardware [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 
tempest-ServersTestBootFromVolume-1491535956-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 788.082257] env[69994]: DEBUG nova.virt.hardware [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 788.082257] env[69994]: DEBUG nova.virt.hardware [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 788.082257] env[69994]: DEBUG nova.virt.hardware [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 788.082257] env[69994]: DEBUG nova.virt.hardware [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 788.082257] env[69994]: DEBUG nova.virt.hardware [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 788.082968] env[69994]: DEBUG nova.virt.hardware [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 788.087129] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965ead9e-3a99-4f2f-b994-9d59ef433c67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.098742] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eabfbe26-2ecf-438e-bf4b-b80b1f36e77b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.104147] env[69994]: DEBUG nova.objects.base [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 788.104444] env[69994]: DEBUG nova.network.neutron [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] allocate_for_instance() {{(pid=69994) allocate_for_instance 
/opt/stack/nova/nova/network/neutron.py:1205}} [ 788.206794] env[69994]: DEBUG nova.policy [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9c7ff94bd744305a13df72dbf967c11', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66d57a69e0924b9abc2cc4e67fc8173c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 788.306717] env[69994]: DEBUG nova.network.neutron [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Updating instance_info_cache with network_info: [{"id": "ecb02147-aeb7-4256-9ce1-e20d727853b4", "address": "fa:16:3e:eb:df:d3", "network": {"id": "182d9a94-1841-4933-bfdc-a6e1bcd6de39", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1233445263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "069aad9dee5a434383cecdd983f451b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecb02147-ae", "ovs_interfaceid": "ecb02147-aeb7-4256-9ce1-e20d727853b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.440931] env[69994]: DEBUG nova.compute.utils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 788.445126] env[69994]: DEBUG nova.compute.manager [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 788.445335] env[69994]: DEBUG nova.network.neutron [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 788.527236] env[69994]: DEBUG nova.policy [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23dbc2114868476ebf5c4c3213a0018e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8573901af35a498196e22b32f545ce4c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 788.528174] env[69994]: DEBUG nova.network.neutron [-] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.712706] env[69994]: DEBUG nova.network.neutron [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Successfully created port: 016f8957-6a5e-4487-b3e5-cb437366c800 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 788.811650] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Releasing lock "refresh_cache-f1f0d79f-dc67-4cf9-816c-c451f20d65ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.812125] env[69994]: DEBUG nova.compute.manager [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Instance network_info: |[{"id": "ecb02147-aeb7-4256-9ce1-e20d727853b4", "address": "fa:16:3e:eb:df:d3", "network": {"id": "182d9a94-1841-4933-bfdc-a6e1bcd6de39", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1233445263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "069aad9dee5a434383cecdd983f451b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecb02147-ae", "ovs_interfaceid": 
"ecb02147-aeb7-4256-9ce1-e20d727853b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 788.812466] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:df:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '604056d6-6dd6-47fa-9eaa-6863a3a7c488', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ecb02147-aeb7-4256-9ce1-e20d727853b4', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 788.824399] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Creating folder: Project (069aad9dee5a434383cecdd983f451b8). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 788.824724] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-477465a1-065a-4165-9b3d-d1e430d7994b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.840627] env[69994]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 788.840845] env[69994]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69994) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 788.841268] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Folder already exists: Project (069aad9dee5a434383cecdd983f451b8). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 788.841509] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Creating folder: Instances. Parent ref: group-v587401. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 788.841772] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ada85dc3-c45b-43a8-926d-59c06952cb37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.854291] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Created folder: Instances in parent group-v587401. [ 788.854711] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 788.855589] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 788.855589] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54318272-09ca-4be6-995b-4c72f3013565 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.878346] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 788.878346] env[69994]: value = "task-2925367" [ 788.878346] env[69994]: _type = "Task" [ 788.878346] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.888195] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925367, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.946339] env[69994]: DEBUG nova.compute.manager [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 789.034968] env[69994]: INFO nova.compute.manager [-] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Took 1.27 seconds to deallocate network for instance. [ 789.054160] env[69994]: DEBUG nova.network.neutron [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Successfully created port: 359d6f2e-895c-4d81-afd6-815307b7c4e9 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 789.389210] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925367, 'name': CreateVM_Task, 'duration_secs': 0.367018} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.391829] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 789.393644] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587410', 'volume_id': 'e1cb381a-6162-44bf-a51f-61502bd6cb59', 'name': 'volume-e1cb381a-6162-44bf-a51f-61502bd6cb59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1f0d79f-dc67-4cf9-816c-c451f20d65ca', 'attached_at': '', 'detached_at': '', 'volume_id': 'e1cb381a-6162-44bf-a51f-61502bd6cb59', 'serial': 'e1cb381a-6162-44bf-a51f-61502bd6cb59'}, 'attachment_id': '149170e1-2aa1-4afd-be3f-ccd9b56a035d', 'device_type': None, 'mount_device': '/dev/sda', 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69994) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 789.393644] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Root volume attach. Driver type: vmdk {{(pid=69994) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 789.393846] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30535033-224a-4c6a-a831-150e9e066ebc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.402951] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bf0f85-c77c-412f-817d-8af51d07d7ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.413216] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6378c244-9f24-486c-a8eb-17d25961efac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.424075] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-5f6372dd-3cbf-4f61-9cb7-6a866f0b2266 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.436385] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Waiting for the task: (returnval){ [ 789.436385] env[69994]: value = "task-2925368" [ 789.436385] env[69994]: _type = "Task" [ 789.436385] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.451033] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925368, 'name': RelocateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.459878] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fc4726-2b9a-4a0c-a8ea-4b184b295e46 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.469087] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6499ba31-8d9b-4f81-9ba5-8aae1e21e4bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.504464] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e9eb80-7ef5-4ace-b25e-9657c655e59b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.508751] env[69994]: DEBUG nova.compute.manager [req-56cd2fa1-3458-4e0a-9d4a-781e21c19d0f req-f553c029-4d34-4462-9668-0c61bae0f2a3 service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Received event network-changed-ecb02147-aeb7-4256-9ce1-e20d727853b4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 789.508908] env[69994]: DEBUG nova.compute.manager [req-56cd2fa1-3458-4e0a-9d4a-781e21c19d0f req-f553c029-4d34-4462-9668-0c61bae0f2a3 service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Refreshing instance network info cache due to event network-changed-ecb02147-aeb7-4256-9ce1-e20d727853b4. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 789.509193] env[69994]: DEBUG oslo_concurrency.lockutils [req-56cd2fa1-3458-4e0a-9d4a-781e21c19d0f req-f553c029-4d34-4462-9668-0c61bae0f2a3 service nova] Acquiring lock "refresh_cache-f1f0d79f-dc67-4cf9-816c-c451f20d65ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.509338] env[69994]: DEBUG oslo_concurrency.lockutils [req-56cd2fa1-3458-4e0a-9d4a-781e21c19d0f req-f553c029-4d34-4462-9668-0c61bae0f2a3 service nova] Acquired lock "refresh_cache-f1f0d79f-dc67-4cf9-816c-c451f20d65ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.509501] env[69994]: DEBUG nova.network.neutron [req-56cd2fa1-3458-4e0a-9d4a-781e21c19d0f req-f553c029-4d34-4462-9668-0c61bae0f2a3 service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Refreshing network info cache for port ecb02147-aeb7-4256-9ce1-e20d727853b4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 789.518627] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4cd39b-34d2-4a61-9397-56f289c31e4c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.535349] env[69994]: DEBUG nova.compute.provider_tree [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.543412] env[69994]: DEBUG oslo_concurrency.lockutils [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.744505] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.744949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.949905] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925368, 'name': RelocateVM_Task} progress is 34%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.962030] env[69994]: DEBUG nova.compute.manager [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 789.990407] env[69994]: DEBUG nova.virt.hardware [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 789.990762] env[69994]: DEBUG nova.virt.hardware [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 789.990951] env[69994]: DEBUG nova.virt.hardware [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 789.991193] env[69994]: DEBUG nova.virt.hardware [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 789.991342] env[69994]: DEBUG nova.virt.hardware [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 789.991503] env[69994]: DEBUG nova.virt.hardware [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 789.991763] env[69994]: DEBUG nova.virt.hardware [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 789.991962] env[69994]: DEBUG nova.virt.hardware [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 789.992191] env[69994]: DEBUG nova.virt.hardware [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 789.992361] env[69994]: DEBUG nova.virt.hardware [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 789.992558] env[69994]: DEBUG nova.virt.hardware [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 789.993635] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aceb921-57bf-4c5a-b351-068ec26304df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.002913] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfd63a4-fe10-4ece-801f-89df78e27b96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.039087] env[69994]: DEBUG nova.scheduler.client.report [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 790.246343] env[69994]: DEBUG nova.network.neutron [req-56cd2fa1-3458-4e0a-9d4a-781e21c19d0f req-f553c029-4d34-4462-9668-0c61bae0f2a3 service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Updated VIF entry in instance network info cache for port ecb02147-aeb7-4256-9ce1-e20d727853b4. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 790.246908] env[69994]: DEBUG nova.network.neutron [req-56cd2fa1-3458-4e0a-9d4a-781e21c19d0f req-f553c029-4d34-4462-9668-0c61bae0f2a3 service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Updating instance_info_cache with network_info: [{"id": "ecb02147-aeb7-4256-9ce1-e20d727853b4", "address": "fa:16:3e:eb:df:d3", "network": {"id": "182d9a94-1841-4933-bfdc-a6e1bcd6de39", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1233445263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "069aad9dee5a434383cecdd983f451b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecb02147-ae", "ovs_interfaceid": "ecb02147-aeb7-4256-9ce1-e20d727853b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.249026] env[69994]: DEBUG nova.compute.utils [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 790.444316] env[69994]: DEBUG nova.network.neutron [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Successfully updated port: 016f8957-6a5e-4487-b3e5-cb437366c800 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 790.453337] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925368, 'name': RelocateVM_Task} progress is 47%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.546670] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.547719] env[69994]: DEBUG nova.compute.manager [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 790.550692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.368s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.552276] env[69994]: INFO nova.compute.claims [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 790.694522] env[69994]: DEBUG nova.network.neutron [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Successfully updated port: 359d6f2e-895c-4d81-afd6-815307b7c4e9 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 790.752127] env[69994]: DEBUG oslo_concurrency.lockutils [req-56cd2fa1-3458-4e0a-9d4a-781e21c19d0f req-f553c029-4d34-4462-9668-0c61bae0f2a3 service nova] Releasing lock "refresh_cache-f1f0d79f-dc67-4cf9-816c-c451f20d65ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.752430] env[69994]: DEBUG nova.compute.manager [req-56cd2fa1-3458-4e0a-9d4a-781e21c19d0f req-f553c029-4d34-4462-9668-0c61bae0f2a3 service nova] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Received event network-vif-deleted-522a6fbf-6853-4dc5-ba10-82f41421ee4a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 790.754236] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.951635] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925368, 'name': RelocateVM_Task} progress is 60%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.955124] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.955403] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.955685] env[69994]: DEBUG nova.network.neutron [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.058998] env[69994]: DEBUG nova.compute.utils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 791.061044] env[69994]: DEBUG nova.compute.manager [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 791.061236] env[69994]: DEBUG nova.network.neutron [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 791.103078] env[69994]: DEBUG nova.policy [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '531fa6ec206244009b9c83dc4141fe7d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1a14bacc0a843b2840c52111795ab8d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 791.198480] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Acquiring lock "refresh_cache-9a1343a8-11b4-4c9e-8445-931eab036a4d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.198584] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Acquired lock "refresh_cache-9a1343a8-11b4-4c9e-8445-931eab036a4d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.198723] env[69994]: DEBUG nova.network.neutron [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.424252] env[69994]: DEBUG nova.network.neutron [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Successfully created port: 20b9c01f-3830-45bc-82b7-4b7014586c1c {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 791.449959] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925368, 'name': RelocateVM_Task} progress is 75%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.530116] env[69994]: WARNING nova.network.neutron [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c already exists in list: networks containing: ['dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c']. 
ignoring it [ 791.566489] env[69994]: DEBUG nova.compute.manager [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 791.760506] env[69994]: DEBUG nova.network.neutron [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.838770] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.839128] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.839317] env[69994]: INFO nova.compute.manager [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Attaching volume 71826ac2-cb8c-418c-90dc-110f088d489d to /dev/sdb [ 791.886519] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443665b0-f5d5-4673-8939-bb5cfd1b5f09 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.900969] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5979e7-0dce-4cc5-9e2c-03a12ce308d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.921272] env[69994]: DEBUG nova.virt.block_device [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Updating existing volume attachment record: e13212ab-98a4-46ff-a5ac-733a17e02ead {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 791.947335] env[69994]: DEBUG nova.network.neutron [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Updating instance_info_cache with network_info: [{"id": "359d6f2e-895c-4d81-afd6-815307b7c4e9", "address": "fa:16:3e:68:d6:ea", "network": {"id": "76ce67d1-cda9-4f17-a4b9-c534ee75267f", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1569714150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8573901af35a498196e22b32f545ce4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap359d6f2e-89", "ovs_interfaceid": "359d6f2e-895c-4d81-afd6-815307b7c4e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.955847] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925368, 'name': RelocateVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.076352] env[69994]: DEBUG nova.network.neutron [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Updating instance_info_cache with network_info: [{"id": "8b639504-b3a0-4772-9a06-af40fbe1667e", "address": "fa:16:3e:d3:a3:94", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b639504-b3", "ovs_interfaceid": "8b639504-b3a0-4772-9a06-af40fbe1667e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "016f8957-6a5e-4487-b3e5-cb437366c800", "address": "fa:16:3e:80:d6:83", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap016f8957-6a", "ovs_interfaceid": "016f8957-6a5e-4487-b3e5-cb437366c800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.090851] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1345fe-33bc-494c-a294-e285bdd64862 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.099170] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc99b5a-a3d8-4cfb-8ddc-65cfcd38f221 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.132415] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603256c2-7f6b-4467-8f2b-bd80ffd0130d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.140671] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07da5fa0-dc36-4e1f-bed0-111fc69dafb2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.155518] env[69994]: DEBUG nova.compute.provider_tree [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.451816] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Releasing lock "refresh_cache-9a1343a8-11b4-4c9e-8445-931eab036a4d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.452162] env[69994]: DEBUG nova.compute.manager [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Instance network_info: |[{"id": "359d6f2e-895c-4d81-afd6-815307b7c4e9", "address": "fa:16:3e:68:d6:ea", "network": {"id": "76ce67d1-cda9-4f17-a4b9-c534ee75267f", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1569714150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8573901af35a498196e22b32f545ce4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap359d6f2e-89", "ovs_interfaceid": "359d6f2e-895c-4d81-afd6-815307b7c4e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 792.452470] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925368, 'name': RelocateVM_Task} progress is 92%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.452883] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:d6:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbd7899c-c96e-47fc-9141-5803b646917a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '359d6f2e-895c-4d81-afd6-815307b7c4e9', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 792.460972] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Creating folder: Project (8573901af35a498196e22b32f545ce4c). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.461252] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69990f77-9463-4fa9-89ba-d09bd9787443 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.473905] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Created folder: Project (8573901af35a498196e22b32f545ce4c) in parent group-v587342. [ 792.474247] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Creating folder: Instances. Parent ref: group-v587466. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.474668] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-638ee16f-ea6f-4d91-93d7-e3534f702370 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.486941] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Created folder: Instances in parent group-v587466. 
[ 792.487378] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 792.487586] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 792.487813] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fa0bd6d-db87-4ec6-8b2d-1dc2b208a561 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.510845] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 792.510845] env[69994]: value = "task-2925374" [ 792.510845] env[69994]: _type = "Task" [ 792.510845] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.521095] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925374, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.580678] env[69994]: DEBUG nova.compute.manager [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 792.583045] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.583680] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.583842] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.585100] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4037cc36-e794-4da4-aff7-928ea3fb2840 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.603505] env[69994]: DEBUG nova.virt.hardware [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 792.603782] env[69994]: DEBUG nova.virt.hardware [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.603941] env[69994]: DEBUG nova.virt.hardware [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 792.604139] env[69994]: DEBUG nova.virt.hardware [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.604286] env[69994]: DEBUG nova.virt.hardware [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 792.604435] env[69994]: DEBUG nova.virt.hardware [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 792.604640] env[69994]: DEBUG nova.virt.hardware [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 792.604801] env[69994]: DEBUG nova.virt.hardware [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 792.604963] env[69994]: DEBUG nova.virt.hardware [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 792.605149] env[69994]: DEBUG nova.virt.hardware [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 792.605326] env[69994]: DEBUG nova.virt.hardware [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 792.611711] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Reconfiguring VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 792.614912] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9619509-d325-418f-8abe-bd025f9d87f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.637918] env[69994]: DEBUG nova.virt.hardware [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 792.638194] env[69994]: DEBUG nova.virt.hardware [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.639045] env[69994]: DEBUG nova.virt.hardware [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 792.639045] env[69994]: DEBUG nova.virt.hardware [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.639045] env[69994]: DEBUG nova.virt.hardware [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 792.639045] env[69994]: DEBUG nova.virt.hardware [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 792.639268] env[69994]: DEBUG nova.virt.hardware [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 792.639268] env[69994]: DEBUG nova.virt.hardware [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 792.639414] env[69994]: DEBUG nova.virt.hardware [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 792.639626] env[69994]: DEBUG nova.virt.hardware [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 792.639876] env[69994]: DEBUG nova.virt.hardware [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 792.640867] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756df4cf-e80b-42fa-a6c0-126894a93a72 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.646708] env[69994]: DEBUG oslo_vmware.api [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 792.646708] env[69994]: value = "task-2925375" [ 792.646708] env[69994]: _type = "Task" [ 792.646708] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.648369] env[69994]: DEBUG nova.compute.manager [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Received event network-vif-plugged-016f8957-6a5e-4487-b3e5-cb437366c800 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 792.648608] env[69994]: DEBUG oslo_concurrency.lockutils [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] Acquiring lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.648846] env[69994]: DEBUG oslo_concurrency.lockutils [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] Lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.649060] env[69994]: DEBUG oslo_concurrency.lockutils [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] Lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.649256] env[69994]: DEBUG nova.compute.manager [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] No waiting events found dispatching network-vif-plugged-016f8957-6a5e-4487-b3e5-cb437366c800 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 792.649427] env[69994]: WARNING nova.compute.manager [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Received unexpected event network-vif-plugged-016f8957-6a5e-4487-b3e5-cb437366c800 for instance with vm_state active and task_state None. [ 792.649628] env[69994]: DEBUG nova.compute.manager [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Received event network-changed-016f8957-6a5e-4487-b3e5-cb437366c800 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 792.649800] env[69994]: DEBUG nova.compute.manager [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Refreshing instance network info cache due to event network-changed-016f8957-6a5e-4487-b3e5-cb437366c800. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 792.649984] env[69994]: DEBUG oslo_concurrency.lockutils [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] Acquiring lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.650166] env[69994]: DEBUG oslo_concurrency.lockutils [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] Acquired lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.650353] env[69994]: DEBUG nova.network.neutron [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Refreshing network info cache for port 016f8957-6a5e-4487-b3e5-cb437366c800 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 792.659795] env[69994]: DEBUG nova.scheduler.client.report [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 792.665467] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660facb3-b25f-44c0-a4b6-e9588e8e4c27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.675112] env[69994]: DEBUG oslo_vmware.api [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925375, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.952022] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925368, 'name': RelocateVM_Task} progress is 97%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.021525] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925374, 'name': CreateVM_Task, 'duration_secs': 0.420981} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.021779] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 793.022507] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.022619] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.022897] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 793.023166] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c48455e3-1fbc-4e46-a89c-5b5c67a885cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.028605] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Waiting for the task: (returnval){ [ 793.028605] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523204c8-3388-0887-1b9c-ea300b36597c" [ 793.028605] env[69994]: _type = "Task" [ 793.028605] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.038167] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523204c8-3388-0887-1b9c-ea300b36597c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.166354] env[69994]: DEBUG oslo_vmware.api [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925375, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.175522] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.176187] env[69994]: DEBUG nova.compute.manager [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 793.179240] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.252s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.180782] env[69994]: INFO nova.compute.claims [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.454680] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925368, 'name': RelocateVM_Task} progress is 98%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.537196] env[69994]: DEBUG nova.network.neutron [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Successfully updated port: 20b9c01f-3830-45bc-82b7-4b7014586c1c {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 793.543576] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523204c8-3388-0887-1b9c-ea300b36597c, 'name': SearchDatastore_Task, 'duration_secs': 0.013848} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.546735] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.546967] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 793.547197] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.547422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.547643] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 793.547969] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac4ec595-8edc-4a35-819c-1d26f17101cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.562289] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 793.562504] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 793.563331] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08b75918-7bf2-4703-8cb7-48fdcddf7d67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.571080] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Waiting for the task: (returnval){ [ 793.571080] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52186b22-40db-10fc-71b0-0f474a02284a" [ 793.571080] env[69994]: _type = "Task" [ 793.571080] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.580773] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52186b22-40db-10fc-71b0-0f474a02284a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.602210] env[69994]: DEBUG nova.network.neutron [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Updated VIF entry in instance network info cache for port 016f8957-6a5e-4487-b3e5-cb437366c800. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 793.602655] env[69994]: DEBUG nova.network.neutron [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Updating instance_info_cache with network_info: [{"id": "8b639504-b3a0-4772-9a06-af40fbe1667e", "address": "fa:16:3e:d3:a3:94", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b639504-b3", "ovs_interfaceid": "8b639504-b3a0-4772-9a06-af40fbe1667e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "016f8957-6a5e-4487-b3e5-cb437366c800", "address": "fa:16:3e:80:d6:83", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap016f8957-6a", "ovs_interfaceid": "016f8957-6a5e-4487-b3e5-cb437366c800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.663047] env[69994]: DEBUG oslo_vmware.api [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925375, 'name': ReconfigVM_Task, 'duration_secs': 0.952867} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.663606] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.663800] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Reconfigured VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 793.690305] env[69994]: DEBUG nova.compute.utils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 793.694893] env[69994]: DEBUG nova.compute.manager [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 793.695079] env[69994]: DEBUG nova.network.neutron [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 793.776374] env[69994]: DEBUG nova.policy [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc0799f063d84f6aa0953ecb32f106ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5642969c42ae403cbfb4d5989e399f8d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 793.953864] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925368, 'name': RelocateVM_Task, 'duration_secs': 4.167026} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.953864] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 793.954075] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587410', 'volume_id': 'e1cb381a-6162-44bf-a51f-61502bd6cb59', 'name': 'volume-e1cb381a-6162-44bf-a51f-61502bd6cb59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1f0d79f-dc67-4cf9-816c-c451f20d65ca', 'attached_at': '', 'detached_at': '', 'volume_id': 'e1cb381a-6162-44bf-a51f-61502bd6cb59', 'serial': 'e1cb381a-6162-44bf-a51f-61502bd6cb59'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 793.954769] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ccaaa3-738d-4987-9167-a6a893dcd120 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.974056] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4678c941-a1c4-40e7-9a47-8e437d0d4f94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.002893] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] volume-e1cb381a-6162-44bf-a51f-61502bd6cb59/volume-e1cb381a-6162-44bf-a51f-61502bd6cb59.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 794.002893] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0eca7d47-fc25-4671-b7c6-136647e4f545 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.029254] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Waiting for the task: (returnval){ [ 794.029254] env[69994]: value = "task-2925376" [ 794.029254] env[69994]: _type = "Task" [ 794.029254] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.038022] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925376, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.044770] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Acquiring lock "refresh_cache-c06a2540-e77d-48c0-967f-94e2a53c4d8f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.044920] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Acquired lock "refresh_cache-c06a2540-e77d-48c0-967f-94e2a53c4d8f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.045017] env[69994]: DEBUG nova.network.neutron [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 794.085730] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52186b22-40db-10fc-71b0-0f474a02284a, 'name': SearchDatastore_Task, 'duration_secs': 0.012833} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.086911] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac088294-5f47-4b5d-825a-59e3769793f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.095613] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Waiting for the task: (returnval){ [ 794.095613] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521b11fa-e256-068b-e71d-a65333125cf0" [ 794.095613] env[69994]: _type = "Task" [ 794.095613] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.110022] env[69994]: DEBUG oslo_concurrency.lockutils [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] Releasing lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.110022] env[69994]: DEBUG nova.compute.manager [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Received event network-vif-plugged-359d6f2e-895c-4d81-afd6-815307b7c4e9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 794.110022] env[69994]: DEBUG oslo_concurrency.lockutils [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] Acquiring lock "9a1343a8-11b4-4c9e-8445-931eab036a4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.110022] env[69994]: DEBUG oslo_concurrency.lockutils [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] Lock "9a1343a8-11b4-4c9e-8445-931eab036a4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.110022] env[69994]: DEBUG oslo_concurrency.lockutils [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] Lock "9a1343a8-11b4-4c9e-8445-931eab036a4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.110022] env[69994]: DEBUG nova.compute.manager [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] No waiting events found dispatching network-vif-plugged-359d6f2e-895c-4d81-afd6-815307b7c4e9 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 794.110022] env[69994]: WARNING nova.compute.manager [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Received unexpected event network-vif-plugged-359d6f2e-895c-4d81-afd6-815307b7c4e9 for instance with vm_state building and task_state spawning. [ 794.110022] env[69994]: DEBUG nova.compute.manager [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Received event network-changed-359d6f2e-895c-4d81-afd6-815307b7c4e9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 794.110022] env[69994]: DEBUG nova.compute.manager [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Refreshing instance network info cache due to event network-changed-359d6f2e-895c-4d81-afd6-815307b7c4e9. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 794.110022] env[69994]: DEBUG oslo_concurrency.lockutils [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] Acquiring lock "refresh_cache-9a1343a8-11b4-4c9e-8445-931eab036a4d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.110022] env[69994]: DEBUG oslo_concurrency.lockutils [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] Acquired lock "refresh_cache-9a1343a8-11b4-4c9e-8445-931eab036a4d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.110022] env[69994]: DEBUG nova.network.neutron [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Refreshing network info cache for port 359d6f2e-895c-4d81-afd6-815307b7c4e9 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 794.111024] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521b11fa-e256-068b-e71d-a65333125cf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.169401] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3440cdf8-4676-4878-8e72-6b96ffcc8ac9 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-e46b8a11-650a-4e34-bc4a-e1c1b2515e76-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.340s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.193818] env[69994]: DEBUG nova.compute.manager [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 794.207318] env[69994]: DEBUG nova.network.neutron [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Successfully created port: 885142d2-3a31-487c-b773-a0b0df2e4e40 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 794.541076] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925376, 'name': ReconfigVM_Task, 'duration_secs': 0.477349} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.541613] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Reconfigured VM instance instance-00000028 to attach disk [datastore2] volume-e1cb381a-6162-44bf-a51f-61502bd6cb59/volume-e1cb381a-6162-44bf-a51f-61502bd6cb59.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 794.552312] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-724cb72c-a5c7-4ff8-9839-521478d3ccd8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.572495] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Waiting for the task: (returnval){ [ 794.572495] env[69994]: value = "task-2925378" [ 794.572495] env[69994]: _type = "Task" [ 794.572495] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.583177] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925378, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.604574] env[69994]: DEBUG nova.network.neutron [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.616097] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521b11fa-e256-068b-e71d-a65333125cf0, 'name': SearchDatastore_Task, 'duration_secs': 0.022833} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.618848] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.619234] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 9a1343a8-11b4-4c9e-8445-931eab036a4d/9a1343a8-11b4-4c9e-8445-931eab036a4d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 794.623042] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e04777a-d291-4483-8d7f-9a93e762b31e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.636776] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Waiting for the task: (returnval){ [ 794.636776] env[69994]: value = "task-2925379" [ 794.636776] env[69994]: _type = "Task" [ 794.636776] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.650219] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925379, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.772663] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b735458b-08ad-42f9-bcb9-cba8dcf1ee24 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.784565] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f36e0f-cae3-485e-876d-8123c10965dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.791889] env[69994]: DEBUG nova.network.neutron [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Updating instance_info_cache with network_info: [{"id": "20b9c01f-3830-45bc-82b7-4b7014586c1c", "address": "fa:16:3e:93:ca:50", "network": {"id": "7bf48270-52c6-4ea7-9eee-06d41454c823", "bridge": "br-int", "label": "tempest-ServersTestJSON-13152800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1a14bacc0a843b2840c52111795ab8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20b9c01f-38", "ovs_interfaceid": "20b9c01f-3830-45bc-82b7-4b7014586c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.820096] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Releasing lock "refresh_cache-c06a2540-e77d-48c0-967f-94e2a53c4d8f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.820399] env[69994]: DEBUG nova.compute.manager [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Instance network_info: |[{"id": "20b9c01f-3830-45bc-82b7-4b7014586c1c", "address": "fa:16:3e:93:ca:50", "network": {"id": "7bf48270-52c6-4ea7-9eee-06d41454c823", "bridge": "br-int", "label": "tempest-ServersTestJSON-13152800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1a14bacc0a843b2840c52111795ab8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20b9c01f-38", "ovs_interfaceid": "20b9c01f-3830-45bc-82b7-4b7014586c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 794.823694] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda80cf6-c1cb-42ef-8e9d-e847c9ffc10a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.826532] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:ca:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '304be4f7-4e36-4468-9ef4-e457341cef18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '20b9c01f-3830-45bc-82b7-4b7014586c1c', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 794.833809] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Creating folder: Project (e1a14bacc0a843b2840c52111795ab8d). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 794.834538] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1926195-28ae-4318-9008-f8a2c72fbf2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.845176] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e63df4-db58-43be-b9c8-6cb989c4c51a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.850835] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Created folder: Project (e1a14bacc0a843b2840c52111795ab8d) in parent group-v587342. [ 794.851193] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Creating folder: Instances. Parent ref: group-v587469. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 794.851886] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9561eaa-9079-49c9-a0e6-48df27bdfafd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.863165] env[69994]: DEBUG nova.compute.provider_tree [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.866101] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Created folder: Instances in parent group-v587469. [ 794.866525] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 794.868033] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 794.868033] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-281d91b2-279b-4bc5-84b9-a0a611e47560 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.891994] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 794.891994] env[69994]: value = "task-2925382" [ 794.891994] env[69994]: _type = "Task" [ 794.891994] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.902042] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925382, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.926502] env[69994]: DEBUG nova.network.neutron [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Updated VIF entry in instance network info cache for port 359d6f2e-895c-4d81-afd6-815307b7c4e9. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 794.926955] env[69994]: DEBUG nova.network.neutron [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Updating instance_info_cache with network_info: [{"id": "359d6f2e-895c-4d81-afd6-815307b7c4e9", "address": "fa:16:3e:68:d6:ea", "network": {"id": "76ce67d1-cda9-4f17-a4b9-c534ee75267f", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1569714150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8573901af35a498196e22b32f545ce4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap359d6f2e-89", "ovs_interfaceid": "359d6f2e-895c-4d81-afd6-815307b7c4e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.951019] env[69994]: DEBUG nova.compute.manager [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Received event network-vif-plugged-20b9c01f-3830-45bc-82b7-4b7014586c1c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 794.951163] env[69994]: DEBUG oslo_concurrency.lockutils [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] Acquiring lock "c06a2540-e77d-48c0-967f-94e2a53c4d8f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.951398] env[69994]: DEBUG oslo_concurrency.lockutils [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] Lock "c06a2540-e77d-48c0-967f-94e2a53c4d8f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.951551] env[69994]: DEBUG oslo_concurrency.lockutils [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] Lock "c06a2540-e77d-48c0-967f-94e2a53c4d8f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.951712] env[69994]: DEBUG nova.compute.manager [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] No waiting events found dispatching network-vif-plugged-20b9c01f-3830-45bc-82b7-4b7014586c1c {{(pid=69994) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 794.951845] env[69994]: WARNING nova.compute.manager [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Received unexpected event network-vif-plugged-20b9c01f-3830-45bc-82b7-4b7014586c1c for instance with vm_state building and task_state spawning. [ 794.952065] env[69994]: DEBUG nova.compute.manager [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Received event network-changed-20b9c01f-3830-45bc-82b7-4b7014586c1c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 794.952159] env[69994]: DEBUG nova.compute.manager [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Refreshing instance network info cache due to event network-changed-20b9c01f-3830-45bc-82b7-4b7014586c1c. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 794.952344] env[69994]: DEBUG oslo_concurrency.lockutils [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] Acquiring lock "refresh_cache-c06a2540-e77d-48c0-967f-94e2a53c4d8f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.952467] env[69994]: DEBUG oslo_concurrency.lockutils [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] Acquired lock "refresh_cache-c06a2540-e77d-48c0-967f-94e2a53c4d8f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.952623] env[69994]: DEBUG nova.network.neutron [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Refreshing network info cache for port 20b9c01f-3830-45bc-82b7-4b7014586c1c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 795.083935] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925378, 'name': ReconfigVM_Task, 'duration_secs': 0.209208} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.084292] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587410', 'volume_id': 'e1cb381a-6162-44bf-a51f-61502bd6cb59', 'name': 'volume-e1cb381a-6162-44bf-a51f-61502bd6cb59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1f0d79f-dc67-4cf9-816c-c451f20d65ca', 'attached_at': '', 'detached_at': '', 'volume_id': 'e1cb381a-6162-44bf-a51f-61502bd6cb59', 'serial': 'e1cb381a-6162-44bf-a51f-61502bd6cb59'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 795.084836] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-242fa54d-8ba2-455c-893a-02703606110d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.094048] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Waiting for the task: (returnval){ [ 795.094048] env[69994]: value = "task-2925383" [ 795.094048] env[69994]: _type = "Task" [ 795.094048] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.105150] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925383, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.147305] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925379, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.209325] env[69994]: DEBUG nova.compute.manager [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 795.232994] env[69994]: DEBUG nova.virt.hardware [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:32:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='231a442d-6575-43ed-8970-683d59890f06',id=27,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1776927979',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 795.233364] env[69994]: DEBUG nova.virt.hardware [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 795.233637] env[69994]: DEBUG nova.virt.hardware [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 795.233878] env[69994]: DEBUG nova.virt.hardware [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 795.234058] env[69994]: DEBUG nova.virt.hardware [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 795.234240] env[69994]: DEBUG nova.virt.hardware [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 795.234521] env[69994]: DEBUG nova.virt.hardware [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 795.234737] env[69994]: DEBUG nova.virt.hardware [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 795.235021] env[69994]: DEBUG 
nova.virt.hardware [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 795.235280] env[69994]: DEBUG nova.virt.hardware [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 795.235594] env[69994]: DEBUG nova.virt.hardware [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 795.236714] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c208d06-7a61-4fd9-bd36-11712b6c52a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.245806] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a4baac-5a0f-41ea-bc02-645523e3a81e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.369039] env[69994]: DEBUG nova.scheduler.client.report [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 795.403742] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925382, 'name': CreateVM_Task, 'duration_secs': 0.436391} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.403859] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 795.404498] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.404664] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.405071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 795.405562] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fbf0a46-b61b-4d0a-bd48-ea9b87f4ad7f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.413022] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Waiting for the task: (returnval){ [ 795.413022] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c646ee-7070-7775-403f-cfedecbb1d82" [ 795.413022] env[69994]: _type = "Task" [ 795.413022] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.421493] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c646ee-7070-7775-403f-cfedecbb1d82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.429732] env[69994]: DEBUG oslo_concurrency.lockutils [req-b05467b5-d4b3-48ce-8067-750774ef3504 req-737d4686-eeec-46cc-9725-a75b431c694f service nova] Releasing lock "refresh_cache-9a1343a8-11b4-4c9e-8445-931eab036a4d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.605055] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925383, 'name': Rename_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.648414] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925379, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.708448] env[69994]: DEBUG nova.network.neutron [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Updated VIF entry in instance network info cache for port 20b9c01f-3830-45bc-82b7-4b7014586c1c. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 795.708812] env[69994]: DEBUG nova.network.neutron [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Updating instance_info_cache with network_info: [{"id": "20b9c01f-3830-45bc-82b7-4b7014586c1c", "address": "fa:16:3e:93:ca:50", "network": {"id": "7bf48270-52c6-4ea7-9eee-06d41454c823", "bridge": "br-int", "label": "tempest-ServersTestJSON-13152800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1a14bacc0a843b2840c52111795ab8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20b9c01f-38", "ovs_interfaceid": "20b9c01f-3830-45bc-82b7-4b7014586c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.870496] env[69994]: DEBUG nova.network.neutron [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Successfully updated port: 885142d2-3a31-487c-b773-a0b0df2e4e40 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 795.875889] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.696s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.876445] env[69994]: DEBUG nova.compute.manager [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 795.879381] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.515s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.880847] env[69994]: INFO nova.compute.claims [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 795.928223] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c646ee-7070-7775-403f-cfedecbb1d82, 'name': SearchDatastore_Task, 'duration_secs': 0.011007} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.928223] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.928223] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 795.928223] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.928223] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.928223] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.928223] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72d3ca51-1540-4027-bb0c-9467583f18d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.943588] 
env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.943841] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 795.944940] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b57280f-4d26-486d-b9db-110686b13249 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.953027] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Waiting for the task: (returnval){ [ 795.953027] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dd3314-d689-e990-f9dc-a05964356556" [ 795.953027] env[69994]: _type = "Task" [ 795.953027] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.968028] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dd3314-d689-e990-f9dc-a05964356556, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.028345] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "interface-e46b8a11-650a-4e34-bc4a-e1c1b2515e76-016f8957-6a5e-4487-b3e5-cb437366c800" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.028660] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-e46b8a11-650a-4e34-bc4a-e1c1b2515e76-016f8957-6a5e-4487-b3e5-cb437366c800" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.107495] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925383, 'name': Rename_Task, 'duration_secs': 0.791329} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.107887] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 796.108058] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a2cb52a-77a1-410f-9d0a-4432e7ded0e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.116356] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Waiting for the task: (returnval){ [ 796.116356] env[69994]: value = "task-2925384" [ 796.116356] env[69994]: _type = "Task" [ 796.116356] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.125794] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925384, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.149265] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925379, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.211974] env[69994]: DEBUG oslo_concurrency.lockutils [req-2184101a-5083-4d79-9e13-4b3c33b85e32 req-9ae0ba40-8875-4429-99f4-2379cb9ac94c service nova] Releasing lock "refresh_cache-c06a2540-e77d-48c0-967f-94e2a53c4d8f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.373870] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.374034] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.374250] env[69994]: DEBUG nova.network.neutron [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 796.388524] env[69994]: DEBUG nova.compute.utils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 796.394076] env[69994]: DEBUG nova.compute.manager [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 796.394344] env[69994]: DEBUG nova.network.neutron [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 796.423123] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b5f7b4-1679-1521-5216-5cce27b539b9/disk-0.vmdk. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 796.424474] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24bdfb09-e33f-4396-935a-c4043f2155a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.432378] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b5f7b4-1679-1521-5216-5cce27b539b9/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 796.432618] env[69994]: ERROR oslo_vmware.rw_handles [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b5f7b4-1679-1521-5216-5cce27b539b9/disk-0.vmdk due to incomplete transfer. [ 796.432852] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ad7c7b49-d977-4f1d-91d6-962fe68ed360 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.442874] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b5f7b4-1679-1521-5216-5cce27b539b9/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 796.442874] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Uploaded image e74b58ec-98de-4cab-ac70-0dc844126ba5 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 796.444201] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 796.444431] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a4575f45-59f2-401c-bbeb-7c8ca6fbf208 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.451759] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 796.451759] env[69994]: value = "task-2925385" [ 796.451759] env[69994]: _type = "Task" [ 796.451759] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.456301] env[69994]: DEBUG nova.policy [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c05ace0df7fe4a72bb3045dcb50fdfe2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a9a6d7e114941d5a384d9907b491335', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 796.468008] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925385, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.471851] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dd3314-d689-e990-f9dc-a05964356556, 'name': SearchDatastore_Task, 'duration_secs': 0.057524} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.472902] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-710837cf-e0c0-41e3-9cd0-ea2a15189475 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.480315] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Waiting for the task: (returnval){ [ 796.480315] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520c6ea6-f993-1d67-1216-756e25719e30" [ 796.480315] env[69994]: _type = "Task" [ 796.480315] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.490684] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520c6ea6-f993-1d67-1216-756e25719e30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.498504] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 796.498748] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587465', 'volume_id': '71826ac2-cb8c-418c-90dc-110f088d489d', 'name': 'volume-71826ac2-cb8c-418c-90dc-110f088d489d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b00d09ea-5eee-47ed-adcb-288cdd362e89', 'attached_at': '', 'detached_at': '', 'volume_id': '71826ac2-cb8c-418c-90dc-110f088d489d', 'serial': '71826ac2-cb8c-418c-90dc-110f088d489d'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 796.499707] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c040bb84-7b0d-4de3-af41-552b17f4f726 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.518692] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263dbe90-4f4f-44f2-b2ea-6132993e9257 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.543740] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.544042] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.552700] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] volume-71826ac2-cb8c-418c-90dc-110f088d489d/volume-71826ac2-cb8c-418c-90dc-110f088d489d.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 796.553557] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e279303-4d47-4248-88b3-809cee25dd63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.556577] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38676502-41a7-47c4-a893-8c01e99fec30 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.588885] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d86a43-294d-49dd-8a42-8703cc977f49 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.592486] env[69994]: DEBUG oslo_vmware.api [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 796.592486] env[69994]: value = "task-2925386" [ 796.592486] env[69994]: _type = "Task" [ 796.592486] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.619400] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Reconfiguring VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 796.623646] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34f31500-dbc2-4279-87b0-20fe999635b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.639334] env[69994]: DEBUG oslo_vmware.api [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925386, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.653573] env[69994]: DEBUG oslo_vmware.api [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925384, 'name': PowerOnVM_Task, 'duration_secs': 0.531664} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.658067] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 796.658368] env[69994]: INFO nova.compute.manager [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Took 8.58 seconds to spawn the instance on the hypervisor. [ 796.659023] env[69994]: DEBUG nova.compute.manager [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 796.659023] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 796.659023] env[69994]: value = "task-2925387" [ 796.659023] env[69994]: _type = "Task" [ 796.659023] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.659234] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925379, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.550567} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.659982] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b3dac0-04dc-401a-9b9d-3049a99a83f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.662947] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 9a1343a8-11b4-4c9e-8445-931eab036a4d/9a1343a8-11b4-4c9e-8445-931eab036a4d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 796.663199] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 796.666551] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4f3cd3d5-3fd2-4879-87be-12351067d553 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.684264] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Waiting for the task: (returnval){ [ 796.684264] env[69994]: value = "task-2925388" [ 796.684264] env[69994]: _type = "Task" [ 796.684264] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.684690] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.696653] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925388, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.859649] env[69994]: DEBUG nova.network.neutron [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Successfully created port: c3dea188-eaa9-40c8-ad7a-c49683af00cb {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 796.906716] env[69994]: DEBUG nova.compute.manager [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 796.930301] env[69994]: DEBUG nova.network.neutron [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 796.968971] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925385, 'name': Destroy_Task, 'duration_secs': 0.404347} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.969297] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Destroyed the VM [ 796.969539] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 796.970132] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-44a49883-c695-454c-b7c8-ceb75a26c58f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.980129] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 796.980129] env[69994]: value = "task-2925389" [ 796.980129] env[69994]: _type = "Task" [ 796.980129] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.993725] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925389, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.000113] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520c6ea6-f993-1d67-1216-756e25719e30, 'name': SearchDatastore_Task, 'duration_secs': 0.015712} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.000113] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.000113] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] c06a2540-e77d-48c0-967f-94e2a53c4d8f/c06a2540-e77d-48c0-967f-94e2a53c4d8f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 797.000113] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2972277-b9b2-45e6-ac48-b22ece704be5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.007870] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Waiting for the task: (returnval){ [ 797.007870] env[69994]: value = "task-2925390" [ 797.007870] env[69994]: _type = "Task" [ 797.007870] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.013657] env[69994]: DEBUG nova.compute.manager [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Received event network-vif-plugged-885142d2-3a31-487c-b773-a0b0df2e4e40 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 797.013758] env[69994]: DEBUG oslo_concurrency.lockutils [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] Acquiring lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.013954] env[69994]: DEBUG oslo_concurrency.lockutils [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] Lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.014126] env[69994]: DEBUG oslo_concurrency.lockutils [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] Lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.014283] env[69994]: DEBUG nova.compute.manager [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] No waiting events found dispatching network-vif-plugged-885142d2-3a31-487c-b773-a0b0df2e4e40 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 797.014439] env[69994]: WARNING nova.compute.manager [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Received unexpected event network-vif-plugged-885142d2-3a31-487c-b773-a0b0df2e4e40 for instance with vm_state building and task_state spawning. [ 797.014631] env[69994]: DEBUG nova.compute.manager [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Received event network-changed-885142d2-3a31-487c-b773-a0b0df2e4e40 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 797.014725] env[69994]: DEBUG nova.compute.manager [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Refreshing instance network info cache due to event network-changed-885142d2-3a31-487c-b773-a0b0df2e4e40. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 797.014884] env[69994]: DEBUG oslo_concurrency.lockutils [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] Acquiring lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.025331] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925390, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.104234] env[69994]: DEBUG oslo_vmware.api [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925386, 'name': ReconfigVM_Task, 'duration_secs': 0.410958} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.106587] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Reconfigured VM instance instance-0000001d to attach disk [datastore2] volume-71826ac2-cb8c-418c-90dc-110f088d489d/volume-71826ac2-cb8c-418c-90dc-110f088d489d.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 797.113106] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73fd8ba9-3a24-4a6c-b5ec-6b04766c2fb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.146761] env[69994]: DEBUG oslo_vmware.api [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 797.146761] env[69994]: value = "task-2925391" [ 797.146761] env[69994]: _type = "Task" [ 797.146761] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.160484] env[69994]: DEBUG oslo_vmware.api [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925391, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.174682] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.202948] env[69994]: INFO nova.compute.manager [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Took 56.89 seconds to build instance. 
[ 797.209431] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925388, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078162} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.210488] env[69994]: DEBUG nova.network.neutron [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance_info_cache with network_info: [{"id": "885142d2-3a31-487c-b773-a0b0df2e4e40", "address": "fa:16:3e:76:b6:04", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap885142d2-3a", "ovs_interfaceid": "885142d2-3a31-487c-b773-a0b0df2e4e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.211665] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 797.212674] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c08079-dbbf-49df-963a-5f378830235d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.240810] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 9a1343a8-11b4-4c9e-8445-931eab036a4d/9a1343a8-11b4-4c9e-8445-931eab036a4d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 797.242050] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd61445c-292c-42a0-b092-6981f68d5c62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.267468] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 
tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Waiting for the task: (returnval){ [ 797.267468] env[69994]: value = "task-2925392" [ 797.267468] env[69994]: _type = "Task" [ 797.267468] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.279650] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925392, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.495387] env[69994]: DEBUG oslo_vmware.api [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925389, 'name': RemoveSnapshot_Task, 'duration_secs': 0.411898} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.495730] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 797.495961] env[69994]: INFO nova.compute.manager [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Took 15.00 seconds to snapshot the instance on the hypervisor. [ 797.517400] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c671a6-da36-43f2-bc0d-628ea1a7aab0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.523909] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925390, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.532031] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc28cf1a-d697-40cc-8ef9-a69e5ff53efb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.565428] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5151502e-a602-490d-acb7-f1294557a3cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.576475] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc0c1b7-ffbd-409d-8878-6836553436ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.591225] env[69994]: DEBUG nova.compute.provider_tree [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.641235] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "00ab07b7-e7ed-4a71-b684-d5af8b1b7616" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.641235] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "00ab07b7-e7ed-4a71-b684-d5af8b1b7616" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.641235] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "00ab07b7-e7ed-4a71-b684-d5af8b1b7616-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.641235] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "00ab07b7-e7ed-4a71-b684-d5af8b1b7616-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.641576] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "00ab07b7-e7ed-4a71-b684-d5af8b1b7616-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.643618] 
env[69994]: INFO nova.compute.manager [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Terminating instance [ 797.657275] env[69994]: DEBUG oslo_vmware.api [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925391, 'name': ReconfigVM_Task, 'duration_secs': 0.24386} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.657620] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587465', 'volume_id': '71826ac2-cb8c-418c-90dc-110f088d489d', 'name': 'volume-71826ac2-cb8c-418c-90dc-110f088d489d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b00d09ea-5eee-47ed-adcb-288cdd362e89', 'attached_at': '', 'detached_at': '', 'volume_id': '71826ac2-cb8c-418c-90dc-110f088d489d', 'serial': '71826ac2-cb8c-418c-90dc-110f088d489d'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 797.677166] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925387, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.705304] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64a9f7c6-a13b-4891-9e52-88c9c04e3935 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Lock "f1f0d79f-dc67-4cf9-816c-c451f20d65ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 94.394s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.713548] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.713941] env[69994]: DEBUG nova.compute.manager [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Instance network_info: |[{"id": "885142d2-3a31-487c-b773-a0b0df2e4e40", "address": "fa:16:3e:76:b6:04", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap885142d2-3a", "ovs_interfaceid": "885142d2-3a31-487c-b773-a0b0df2e4e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 797.714272] env[69994]: DEBUG oslo_concurrency.lockutils [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] Acquired lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.714455] env[69994]: DEBUG nova.network.neutron [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Refreshing network info cache for port 885142d2-3a31-487c-b773-a0b0df2e4e40 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 797.715715] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:b6:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '885142d2-3a31-487c-b773-a0b0df2e4e40', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 797.723676] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 797.725758] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 797.725758] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a2f5d531-637a-4109-a061-67d24b6a3ea1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.754876] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 797.754876] env[69994]: value = "task-2925393" [ 797.754876] env[69994]: _type = "Task" [ 797.754876] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.766323] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925393, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.778326] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925392, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.917789] env[69994]: DEBUG nova.compute.manager [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 797.946847] env[69994]: DEBUG nova.virt.hardware [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 797.947226] env[69994]: DEBUG nova.virt.hardware [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.947270] env[69994]: DEBUG nova.virt.hardware [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 797.947435] env[69994]: DEBUG nova.virt.hardware [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.947582] env[69994]: DEBUG nova.virt.hardware [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 797.947730] env[69994]: DEBUG nova.virt.hardware [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 797.948032] env[69994]: DEBUG nova.virt.hardware [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 797.948104] env[69994]: DEBUG nova.virt.hardware [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 797.948268] env[69994]: DEBUG nova.virt.hardware [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 797.948425] env[69994]: DEBUG nova.virt.hardware [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 797.948599] env[69994]: DEBUG nova.virt.hardware [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 797.949492] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370ebe3c-25c9-419b-b0d2-c6883f388189 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.958308] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e15422-e356-4cb7-a839-206d449e70ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.002686] env[69994]: DEBUG nova.compute.manager [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Instance disappeared during snapshot {{(pid=69994) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 798.014296] env[69994]: DEBUG nova.compute.manager [None req-9428c40f-1eb7-4379-8929-b7b0b8771af1 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Image not found during clean up e74b58ec-98de-4cab-ac70-0dc844126ba5 {{(pid=69994) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 798.019930] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925390, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.830452} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.020460] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] c06a2540-e77d-48c0-967f-94e2a53c4d8f/c06a2540-e77d-48c0-967f-94e2a53c4d8f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 798.020681] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 798.020944] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f49586f6-db36-4213-bd86-4e187fe583cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.030102] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Waiting for the task: (returnval){ [ 798.030102] env[69994]: value = "task-2925394" [ 798.030102] env[69994]: _type = "Task" [ 798.030102] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.042281] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925394, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.095373] env[69994]: DEBUG nova.scheduler.client.report [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 798.147266] env[69994]: DEBUG nova.compute.manager [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 798.150020] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 798.150020] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2f0dc5-476c-4fcc-82aa-c55575b701a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.158587] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 798.160025] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-29006233-603c-4cda-a430-70661c44485c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.174774] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.210856] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 798.233569] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 798.235024] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 798.235024] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleting the datastore file [datastore1] 00ab07b7-e7ed-4a71-b684-d5af8b1b7616 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 798.235024] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43455940-3f4a-4594-9902-d2af4988ab40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.246030] env[69994]: DEBUG oslo_vmware.api [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 798.246030] env[69994]: value = "task-2925396" [ 798.246030] env[69994]: _type = "Task" [ 798.246030] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.258028] env[69994]: DEBUG oslo_vmware.api [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925396, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.270942] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925393, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.283557] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925392, 'name': ReconfigVM_Task, 'duration_secs': 0.590508} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.286706] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 9a1343a8-11b4-4c9e-8445-931eab036a4d/9a1343a8-11b4-4c9e-8445-931eab036a4d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 798.287638] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5798920f-ce26-464a-9c54-ba5936056988 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.298143] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Waiting for the task: (returnval){ [ 798.298143] env[69994]: value = "task-2925397" [ 798.298143] env[69994]: _type = "Task" [ 798.298143] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.306640] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925397, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.344733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "b4c6b628-426e-4efc-b8b6-0c2937ef6df3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.344733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "b4c6b628-426e-4efc-b8b6-0c2937ef6df3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.542561] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925394, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077219} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.542865] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 798.546011] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e50066-42cc-4523-b019-070fbea5b003 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.572736] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] c06a2540-e77d-48c0-967f-94e2a53c4d8f/c06a2540-e77d-48c0-967f-94e2a53c4d8f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 798.575187] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b7f7a1e-9d97-404d-ba15-b19503b58a2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.597120] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Waiting for the task: (returnval){ [ 798.597120] env[69994]: value = "task-2925398" [ 798.597120] env[69994]: _type = "Task" [ 798.597120] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.601125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.722s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.601602] env[69994]: DEBUG nova.compute.manager [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 798.613848] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.048s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.613848] env[69994]: INFO nova.compute.claims [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.615283] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925398, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.640261] env[69994]: DEBUG nova.network.neutron [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updated VIF entry in instance network info cache for port 885142d2-3a31-487c-b773-a0b0df2e4e40. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 798.640261] env[69994]: DEBUG nova.network.neutron [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance_info_cache with network_info: [{"id": "885142d2-3a31-487c-b773-a0b0df2e4e40", "address": "fa:16:3e:76:b6:04", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap885142d2-3a", "ovs_interfaceid": "885142d2-3a31-487c-b773-a0b0df2e4e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.677180] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925387, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.729568] env[69994]: DEBUG nova.objects.instance [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lazy-loading 'flavor' on Instance uuid b00d09ea-5eee-47ed-adcb-288cdd362e89 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.733977] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.758551] env[69994]: DEBUG oslo_vmware.api [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925396, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224493} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.761809] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 798.762275] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 798.762522] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 798.763076] env[69994]: INFO nova.compute.manager [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Took 0.61 seconds to destroy the instance on the hypervisor. [ 798.764217] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 798.764359] env[69994]: DEBUG nova.compute.manager [-] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 798.764459] env[69994]: DEBUG nova.network.neutron [-] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 798.772009] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925393, 'name': CreateVM_Task, 'duration_secs': 0.61326} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.772328] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 798.772979] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.773242] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.773592] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 798.774968] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-897802ee-234f-4984-ba93-6c240172714b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.780656] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 798.780656] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5219f0c5-a745-a0e4-a0b0-e4f21b9ccee3" [ 798.780656] env[69994]: _type = "Task" [ 798.780656] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.792684] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5219f0c5-a745-a0e4-a0b0-e4f21b9ccee3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.806711] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925397, 'name': Rename_Task, 'duration_secs': 0.176451} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.807951] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 798.808296] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-768f5849-0c5a-48c5-93f4-a30929f01d26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.817752] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Waiting for the task: (returnval){ [ 798.817752] env[69994]: value = "task-2925399" [ 798.817752] env[69994]: _type = "Task" [ 798.817752] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.830704] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925399, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.118872] env[69994]: DEBUG nova.compute.utils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 799.122329] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925398, 'name': ReconfigVM_Task, 'duration_secs': 0.280811} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.122739] env[69994]: DEBUG nova.network.neutron [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Successfully updated port: c3dea188-eaa9-40c8-ad7a-c49683af00cb {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 799.124011] env[69994]: DEBUG nova.compute.manager [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 799.127366] env[69994]: DEBUG nova.network.neutron [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 799.128152] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Reconfigured VM instance instance-0000002a to attach disk [datastore2] c06a2540-e77d-48c0-967f-94e2a53c4d8f/c06a2540-e77d-48c0-967f-94e2a53c4d8f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 799.129527] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-59d84286-dd43-432b-a81c-82812de74ea2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.141015] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Waiting for the task: (returnval){ [ 799.141015] env[69994]: value = "task-2925400" [ 799.141015] env[69994]: _type = "Task" [ 799.141015] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.146170] env[69994]: DEBUG oslo_concurrency.lockutils [req-4cbad06f-7307-44db-b4e0-492d781cb850 req-8b6cc6dc-d068-44c5-80ff-2730bf78c268 service nova] Releasing lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.152572] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925400, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.175677] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925387, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.221927] env[69994]: DEBUG nova.compute.manager [req-b3ff3ffe-9ff7-41df-b582-72e9f81acd3b req-4083cc8c-ded6-46ac-8e0e-38b600694837 service nova] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Received event network-vif-plugged-c3dea188-eaa9-40c8-ad7a-c49683af00cb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 799.224588] env[69994]: DEBUG oslo_concurrency.lockutils [req-b3ff3ffe-9ff7-41df-b582-72e9f81acd3b req-4083cc8c-ded6-46ac-8e0e-38b600694837 service nova] Acquiring lock "9269e42b-b05c-4c88-9008-aaeda4b0248f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.224705] env[69994]: DEBUG oslo_concurrency.lockutils [req-b3ff3ffe-9ff7-41df-b582-72e9f81acd3b req-4083cc8c-ded6-46ac-8e0e-38b600694837 service nova] Lock "9269e42b-b05c-4c88-9008-aaeda4b0248f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.224984] env[69994]: DEBUG oslo_concurrency.lockutils [req-b3ff3ffe-9ff7-41df-b582-72e9f81acd3b req-4083cc8c-ded6-46ac-8e0e-38b600694837 service nova] Lock "9269e42b-b05c-4c88-9008-aaeda4b0248f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.225314] env[69994]: DEBUG nova.compute.manager [req-b3ff3ffe-9ff7-41df-b582-72e9f81acd3b req-4083cc8c-ded6-46ac-8e0e-38b600694837 service nova] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] No waiting events found dispatching network-vif-plugged-c3dea188-eaa9-40c8-ad7a-c49683af00cb {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 799.225643] env[69994]: WARNING nova.compute.manager [req-b3ff3ffe-9ff7-41df-b582-72e9f81acd3b req-4083cc8c-ded6-46ac-8e0e-38b600694837 service nova] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Received unexpected event network-vif-plugged-c3dea188-eaa9-40c8-ad7a-c49683af00cb for instance with vm_state building and task_state spawning. 
[ 799.239729] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2b7c9e1-77fa-4509-a9d0-03f70892151d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.400s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.250128] env[69994]: DEBUG nova.compute.manager [req-57c8f45d-7a86-4884-9bfe-f7a765c82bee req-3ca30648-4ebf-4e5e-ae55-8fdad78e3eb7 service nova] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Received event network-vif-deleted-f3fc1328-25fb-4ac4-ab6e-c522b2ccc666 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 799.250374] env[69994]: INFO nova.compute.manager [req-57c8f45d-7a86-4884-9bfe-f7a765c82bee req-3ca30648-4ebf-4e5e-ae55-8fdad78e3eb7 service nova] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Neutron deleted interface f3fc1328-25fb-4ac4-ab6e-c522b2ccc666; detaching it from the instance and deleting it from the info cache [ 799.250571] env[69994]: DEBUG nova.network.neutron [req-57c8f45d-7a86-4884-9bfe-f7a765c82bee req-3ca30648-4ebf-4e5e-ae55-8fdad78e3eb7 service nova] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.276668] env[69994]: DEBUG nova.policy [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '30a6e1760a2b425b9f766428e668966f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e00e07d25fbb4808a115bb959bab8456', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 799.296085] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5219f0c5-a745-a0e4-a0b0-e4f21b9ccee3, 'name': SearchDatastore_Task, 'duration_secs': 0.020483} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.296085] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.296085] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 799.296085] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.296085] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.296085] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 799.296085] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86cd1a1d-ab61-453f-8523-2ddfca6b0c93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.308450] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 799.308675] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 799.309490] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c89639ad-597b-4042-b2c5-9c3bdaa35e7c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.322021] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 799.322021] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5298ee57-de58-991a-733e-43da18cd362a" [ 799.322021] env[69994]: _type = "Task" [ 799.322021] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.340207] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5298ee57-de58-991a-733e-43da18cd362a, 'name': SearchDatastore_Task, 'duration_secs': 0.012679} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.344559] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925399, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.344559] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9224326f-f4f0-4f28-810a-72129dafd81d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.353589] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 799.353589] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a7c8c2-537f-4c0e-416d-a19d5e3654a7" [ 799.353589] env[69994]: _type = "Task" [ 799.353589] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.363670] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a7c8c2-537f-4c0e-416d-a19d5e3654a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.555358] env[69994]: DEBUG nova.network.neutron [-] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.624998] env[69994]: DEBUG nova.compute.manager [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 799.632223] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "refresh_cache-9269e42b-b05c-4c88-9008-aaeda4b0248f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.632223] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquired lock "refresh_cache-9269e42b-b05c-4c88-9008-aaeda4b0248f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.632223] env[69994]: DEBUG nova.network.neutron [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 799.666552] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925400, 'name': Rename_Task, 'duration_secs': 0.169209} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.666999] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 799.673877] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-28e87564-e8ce-4d21-9bc5-54f176899612 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.686300] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.687919] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Waiting for the task: (returnval){ [ 799.687919] env[69994]: value = "task-2925401" [ 799.687919] env[69994]: _type = "Task" [ 799.687919] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.753836] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04bca70e-802a-4d75-96bc-ae46f30750f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.764845] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f2ee53-4f66-43cb-8ea0-1ce322d62340 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.808677] env[69994]: DEBUG nova.compute.manager [req-57c8f45d-7a86-4884-9bfe-f7a765c82bee req-3ca30648-4ebf-4e5e-ae55-8fdad78e3eb7 service nova] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Detach interface failed, port_id=f3fc1328-25fb-4ac4-ab6e-c522b2ccc666, reason: Instance 00ab07b7-e7ed-4a71-b684-d5af8b1b7616 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 799.831911] env[69994]: DEBUG oslo_vmware.api [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925399, 'name': PowerOnVM_Task, 'duration_secs': 0.633493} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.832324] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 799.832527] env[69994]: INFO nova.compute.manager [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Took 9.87 seconds to spawn the instance on the hypervisor. [ 799.832703] env[69994]: DEBUG nova.compute.manager [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 799.833524] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df02fcb-dc66-45f6-9d0d-4de784b5b286 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.866042] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a7c8c2-537f-4c0e-416d-a19d5e3654a7, 'name': SearchDatastore_Task, 'duration_secs': 0.019764} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.866042] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.866042] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8/f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 799.866042] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c798424-3bfc-449b-9b72-4e7ac12eb292 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.873291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.873291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.877430] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 799.877430] env[69994]: value = "task-2925402" [ 799.877430] env[69994]: _type = "Task" [ 799.877430] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.881903] env[69994]: DEBUG nova.network.neutron [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Successfully created port: c7ee288e-97b3-4ede-8384-13bef028a530 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.890123] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925402, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.058465] env[69994]: INFO nova.compute.manager [-] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Took 1.29 seconds to deallocate network for instance. [ 800.176551] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.178082] env[69994]: DEBUG nova.network.neutron [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.181752] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b4c76a-f361-42fe-a1cf-45c620116677 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.193962] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7791429-dcc8-4df3-a180-6ecd079fc676 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.203285] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925401, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.231307] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d77e83-1bb1-43f5-9eb1-871c8e5f0e7c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.239350] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c3dcc8-7748-4b0f-9c2d-0b9112200ce1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.253893] env[69994]: DEBUG nova.compute.provider_tree [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 800.353477] env[69994]: INFO nova.compute.manager [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Took 52.13 seconds to build instance. 
[ 800.379651] env[69994]: INFO nova.compute.manager [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Detaching volume 71826ac2-cb8c-418c-90dc-110f088d489d [ 800.385801] env[69994]: DEBUG nova.network.neutron [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Updating instance_info_cache with network_info: [{"id": "c3dea188-eaa9-40c8-ad7a-c49683af00cb", "address": "fa:16:3e:4e:9c:4d", "network": {"id": "0d173c7c-24a5-47c8-b6e4-716b3226c39b", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1739066242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a9a6d7e114941d5a384d9907b491335", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3dea188-ea", "ovs_interfaceid": "c3dea188-eaa9-40c8-ad7a-c49683af00cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.397561] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925402, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.427326] env[69994]: INFO nova.virt.block_device [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Attempting to driver detach volume 71826ac2-cb8c-418c-90dc-110f088d489d from mountpoint /dev/sdb [ 800.427569] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 800.428025] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587465', 'volume_id': '71826ac2-cb8c-418c-90dc-110f088d489d', 'name': 'volume-71826ac2-cb8c-418c-90dc-110f088d489d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b00d09ea-5eee-47ed-adcb-288cdd362e89', 'attached_at': '', 'detached_at': '', 'volume_id': '71826ac2-cb8c-418c-90dc-110f088d489d', 'serial': '71826ac2-cb8c-418c-90dc-110f088d489d'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 800.428698] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ac3f61-dd98-4c81-babe-0eebc1dd5395 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.458632] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217b1252-42d2-48ed-91a4-52620f8fc012 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.466550] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a56f6e-e04e-41cd-a7e5-59d04505045b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.491278] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d172e485-42a8-4189-9e41-80372cddd0d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.509164] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] The volume has not been displaced from its original location: [datastore2] volume-71826ac2-cb8c-418c-90dc-110f088d489d/volume-71826ac2-cb8c-418c-90dc-110f088d489d.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 800.513526] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Reconfiguring VM instance instance-0000001d to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 800.513987] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c36784ff-375b-4899-b790-d2e505b76285 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.534192] env[69994]: DEBUG oslo_vmware.api [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 800.534192] env[69994]: value = "task-2925403" [ 800.534192] env[69994]: _type = "Task" [ 800.534192] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.545294] env[69994]: DEBUG oslo_vmware.api [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925403, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.569590] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.635117] env[69994]: DEBUG nova.compute.manager [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 800.673948] env[69994]: DEBUG nova.virt.hardware [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 800.674243] env[69994]: DEBUG nova.virt.hardware [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.674400] env[69994]: DEBUG nova.virt.hardware [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 800.674578] env[69994]: DEBUG nova.virt.hardware [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.674724] env[69994]: DEBUG nova.virt.hardware [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 800.674858] env[69994]: DEBUG nova.virt.hardware [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 800.675138] env[69994]: DEBUG nova.virt.hardware [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 800.675329] env[69994]: DEBUG nova.virt.hardware [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 800.675553] env[69994]: DEBUG nova.virt.hardware [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 800.675792] env[69994]: DEBUG nova.virt.hardware [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 800.676038] env[69994]: DEBUG nova.virt.hardware [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 800.676986] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e47197-b0fd-4f39-ae40-e88680808888 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.686847] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.697184] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d72ec69-fefe-414e-8a5d-26db29a59c20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.705776] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925401, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.793223] env[69994]: ERROR nova.scheduler.client.report [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [req-caaa0d2e-7f85-4d6c-a2e9-b844abd4eb07] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-caaa0d2e-7f85-4d6c-a2e9-b844abd4eb07"}]} [ 800.812123] env[69994]: DEBUG nova.scheduler.client.report [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 800.833234] env[69994]: DEBUG nova.scheduler.client.report [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 800.833234] env[69994]: DEBUG nova.compute.provider_tree [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 800.848241] env[69994]: DEBUG nova.scheduler.client.report [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 800.855279] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a36f0edc-7e5d-4eef-9c5e-4ddd0b94da99 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Lock "9a1343a8-11b4-4c9e-8445-931eab036a4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.162s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.874021] env[69994]: DEBUG nova.scheduler.client.report [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 800.892049] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925402, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.8716} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.892557] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Releasing lock "refresh_cache-9269e42b-b05c-4c88-9008-aaeda4b0248f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.892874] env[69994]: DEBUG nova.compute.manager [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Instance network_info: |[{"id": "c3dea188-eaa9-40c8-ad7a-c49683af00cb", "address": "fa:16:3e:4e:9c:4d", "network": {"id": "0d173c7c-24a5-47c8-b6e4-716b3226c39b", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1739066242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a9a6d7e114941d5a384d9907b491335", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3dea188-ea", "ovs_interfaceid": "c3dea188-eaa9-40c8-ad7a-c49683af00cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 800.893174] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8/f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 800.893423] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 800.893841] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 
9269e42b-b05c-4c88-9008-aaeda4b0248f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:9c:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3dea188-eaa9-40c8-ad7a-c49683af00cb', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 800.908155] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 800.908431] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-608c8a9f-3d63-4963-a004-f6ad9d01cc73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.916182] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 800.916808] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c08b17d-05e8-460e-a7c3-00304e6bbcef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.940214] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 800.940214] env[69994]: value = "task-2925404" [ 800.940214] env[69994]: _type = "Task" [ 800.940214] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.943368] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 800.943368] env[69994]: value = "task-2925405" [ 800.943368] env[69994]: _type = "Task" [ 800.943368] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.950216] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925404, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.957812] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925405, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.049653] env[69994]: DEBUG oslo_vmware.api [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925403, 'name': ReconfigVM_Task, 'duration_secs': 0.383414} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.049958] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Reconfigured VM instance instance-0000001d to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 801.055192] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b76aef7-3b2e-4548-9f14-0f15e0f6f6dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.076511] env[69994]: DEBUG oslo_vmware.api [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 801.076511] env[69994]: value = "task-2925406" [ 801.076511] env[69994]: _type = "Task" [ 801.076511] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.085685] env[69994]: DEBUG oslo_vmware.api [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925406, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.185263] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.199982] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925401, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.358507] env[69994]: DEBUG nova.compute.manager [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 801.451792] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925404, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086275} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.457706] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 801.458778] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b953c2-ab6e-4be8-be52-7322d5a3e31f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.468845] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925405, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.489552] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8/f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 801.490996] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94ff4c8c-11d0-4659-a364-de08473d9c7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.510206] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742dad87-fbc1-4569-b6a8-17c39a521d4d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.519998] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1131114d-3bfa-4659-963e-2626f9814852 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.523415] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 801.523415] env[69994]: value = "task-2925407" [ 801.523415] env[69994]: _type = "Task" [ 801.523415] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.556590] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd96a53f-9474-430b-b8ad-5615020df2ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.560731] env[69994]: DEBUG nova.compute.manager [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Received event network-changed-c3dea188-eaa9-40c8-ad7a-c49683af00cb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 801.560925] env[69994]: DEBUG nova.compute.manager [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Refreshing instance network info cache due to event network-changed-c3dea188-eaa9-40c8-ad7a-c49683af00cb. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 801.561163] env[69994]: DEBUG oslo_concurrency.lockutils [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] Acquiring lock "refresh_cache-9269e42b-b05c-4c88-9008-aaeda4b0248f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.561302] env[69994]: DEBUG oslo_concurrency.lockutils [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] Acquired lock "refresh_cache-9269e42b-b05c-4c88-9008-aaeda4b0248f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.561457] env[69994]: DEBUG nova.network.neutron [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Refreshing network info cache for port c3dea188-eaa9-40c8-ad7a-c49683af00cb {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 801.568715] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925407, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.575093] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9d6eec-0264-471f-9898-1da80bf8998f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.593951] env[69994]: DEBUG nova.compute.provider_tree [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 801.601548] env[69994]: DEBUG oslo_vmware.api [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925406, 'name': ReconfigVM_Task, 'duration_secs': 0.182166} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.601548] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587465', 'volume_id': '71826ac2-cb8c-418c-90dc-110f088d489d', 'name': 'volume-71826ac2-cb8c-418c-90dc-110f088d489d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b00d09ea-5eee-47ed-adcb-288cdd362e89', 'attached_at': '', 'detached_at': '', 'volume_id': '71826ac2-cb8c-418c-90dc-110f088d489d', 'serial': '71826ac2-cb8c-418c-90dc-110f088d489d'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 801.681889] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.700335] env[69994]: DEBUG oslo_vmware.api [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925401, 'name': PowerOnVM_Task, 'duration_secs': 1.523045} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.700617] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 801.700818] env[69994]: INFO nova.compute.manager [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Took 9.12 seconds to spawn the instance on the hypervisor. [ 801.700997] env[69994]: DEBUG nova.compute.manager [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 801.701889] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ecffc92-4e70-4cb1-97ad-4ba4bb2f0259 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.871766] env[69994]: DEBUG nova.network.neutron [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Successfully updated port: c7ee288e-97b3-4ede-8384-13bef028a530 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 801.883419] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.962682] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925405, 'name': CreateVM_Task, 'duration_secs': 0.559035} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.962855] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 801.967025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.967025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.967025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 801.967025] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a21bcb2a-932f-4793-ac36-926e90148f18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.969273] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 801.969273] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526f2455-7f8a-601c-5137-4f96d73330fa" [ 801.969273] env[69994]: _type = "Task" [ 801.969273] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.978057] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526f2455-7f8a-601c-5137-4f96d73330fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.033288] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925407, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.070937] env[69994]: DEBUG oslo_concurrency.lockutils [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Acquiring lock "9a1343a8-11b4-4c9e-8445-931eab036a4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.071483] env[69994]: DEBUG oslo_concurrency.lockutils [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Lock "9a1343a8-11b4-4c9e-8445-931eab036a4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.073538] env[69994]: DEBUG oslo_concurrency.lockutils [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Acquiring lock "9a1343a8-11b4-4c9e-8445-931eab036a4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.073538] env[69994]: DEBUG oslo_concurrency.lockutils [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Lock "9a1343a8-11b4-4c9e-8445-931eab036a4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.073538] env[69994]: DEBUG oslo_concurrency.lockutils [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Lock "9a1343a8-11b4-4c9e-8445-931eab036a4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.075095] env[69994]: INFO nova.compute.manager [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Terminating instance [ 802.137049] env[69994]: DEBUG nova.scheduler.client.report [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 65 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:975}} [ 802.137049] env[69994]: DEBUG nova.compute.provider_tree [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 65 to 66 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 802.137049] env[69994]: DEBUG nova.compute.provider_tree [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 802.179592] env[69994]: DEBUG nova.objects.instance [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lazy-loading 'flavor' on Instance uuid b00d09ea-5eee-47ed-adcb-288cdd362e89 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 802.196022] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925387, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.226046] env[69994]: INFO nova.compute.manager [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Took 47.64 seconds to build instance. [ 802.373363] env[69994]: DEBUG nova.network.neutron [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Updated VIF entry in instance network info cache for port c3dea188-eaa9-40c8-ad7a-c49683af00cb. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 802.373614] env[69994]: DEBUG nova.network.neutron [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Updating instance_info_cache with network_info: [{"id": "c3dea188-eaa9-40c8-ad7a-c49683af00cb", "address": "fa:16:3e:4e:9c:4d", "network": {"id": "0d173c7c-24a5-47c8-b6e4-716b3226c39b", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1739066242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a9a6d7e114941d5a384d9907b491335", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3dea188-ea", "ovs_interfaceid": "c3dea188-eaa9-40c8-ad7a-c49683af00cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.375654] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Acquiring lock "refresh_cache-3c814c83-20cc-4871-9f30-5c0c7d99b8a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.377775] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Acquired lock "refresh_cache-3c814c83-20cc-4871-9f30-5c0c7d99b8a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.377775] env[69994]: DEBUG nova.network.neutron [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 802.483102] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526f2455-7f8a-601c-5137-4f96d73330fa, 'name': SearchDatastore_Task, 'duration_secs': 0.018638} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.483102] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.483102] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 802.483102] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.483102] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.483102] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 802.483102] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6fbdb50-4516-4565-a082-2d7f56f229eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.499518] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 802.499724] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 802.500483] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72370aec-abce-4b39-a0ce-17a71cb9bb5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.507230] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 802.507230] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d9b06c-cb2c-006a-ae8a-f97249f29e54" [ 802.507230] env[69994]: _type = "Task" [ 802.507230] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.515921] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d9b06c-cb2c-006a-ae8a-f97249f29e54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.533025] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925407, 'name': ReconfigVM_Task, 'duration_secs': 0.737387} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.533327] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Reconfigured VM instance instance-0000002b to attach disk [datastore2] f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8/f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 802.533999] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43703187-0cd7-428d-9c86-205b69d77eea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.540739] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 802.540739] env[69994]: value = "task-2925408" [ 802.540739] env[69994]: _type = "Task" [ 802.540739] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.548806] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925408, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.579737] env[69994]: DEBUG nova.compute.manager [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 802.579963] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 802.580880] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49595bf4-86d1-4709-9d2b-deee27f4f63c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.588513] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 802.588930] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2dc1d8d-230d-4ab4-bbe0-b4bc32d2d29d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.595231] env[69994]: DEBUG oslo_vmware.api [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Waiting for the task: (returnval){ [ 802.595231] env[69994]: value = "task-2925409" [ 802.595231] env[69994]: _type = "Task" [ 802.595231] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.603176] env[69994]: DEBUG oslo_vmware.api [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925409, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.643738] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.033s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.644350] env[69994]: DEBUG nova.compute.manager [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 802.647612] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.893s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.652494] env[69994]: INFO nova.compute.claims [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 802.683389] env[69994]: DEBUG oslo_vmware.api [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925387, 'name': ReconfigVM_Task, 'duration_secs': 5.788415} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.683725] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.683982] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Reconfigured VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 802.727029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-255911b7-83bc-4048-b761-0ab93bd4176c tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Lock "c06a2540-e77d-48c0-967f-94e2a53c4d8f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.861s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.839740] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Acquiring lock "9d146d57-9948-4b18-a3f3-675b53d137ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.840096] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Lock "9d146d57-9948-4b18-a3f3-675b53d137ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.878680] env[69994]: DEBUG oslo_concurrency.lockutils [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] 
Releasing lock "refresh_cache-9269e42b-b05c-4c88-9008-aaeda4b0248f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.878979] env[69994]: DEBUG nova.compute.manager [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Received event network-changed-ecb02147-aeb7-4256-9ce1-e20d727853b4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 802.879571] env[69994]: DEBUG nova.compute.manager [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Refreshing instance network info cache due to event network-changed-ecb02147-aeb7-4256-9ce1-e20d727853b4. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 802.879724] env[69994]: DEBUG oslo_concurrency.lockutils [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] Acquiring lock "refresh_cache-f1f0d79f-dc67-4cf9-816c-c451f20d65ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.879875] env[69994]: DEBUG oslo_concurrency.lockutils [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] Acquired lock "refresh_cache-f1f0d79f-dc67-4cf9-816c-c451f20d65ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.880203] env[69994]: DEBUG nova.network.neutron [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Refreshing network info cache for port ecb02147-aeb7-4256-9ce1-e20d727853b4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 803.022773] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d9b06c-cb2c-006a-ae8a-f97249f29e54, 'name': SearchDatastore_Task, 'duration_secs': 0.026055} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.023340] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bde6c855-03db-4d7d-8c13-a2ae0ecc9b50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.029820] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 803.029820] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e18a55-346b-268f-b881-17ed795b9689" [ 803.029820] env[69994]: _type = "Task" [ 803.029820] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.038876] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e18a55-346b-268f-b881-17ed795b9689, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.053500] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925408, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.106202] env[69994]: DEBUG oslo_vmware.api [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925409, 'name': PowerOffVM_Task, 'duration_secs': 0.253821} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.106896] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 803.107274] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 803.107654] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-418ee3a0-0847-4925-b8c1-a09bfa2481be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.144829] env[69994]: DEBUG nova.network.neutron [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.156542] env[69994]: DEBUG nova.compute.utils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 803.161157] env[69994]: DEBUG nova.compute.manager [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 803.161552] env[69994]: DEBUG nova.network.neutron [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 803.181496] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 803.181496] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 803.181496] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Deleting the datastore file [datastore2] 9a1343a8-11b4-4c9e-8445-931eab036a4d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 803.184083] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14ed5fe9-b40f-4415-9c4e-d553cb12ba84 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.195076] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd502bc9-8640-431e-b744-9a62591149b2 tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.322s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.201847] env[69994]: DEBUG oslo_vmware.api [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Waiting for the task: (returnval){ [ 803.201847] env[69994]: value = "task-2925411" [ 803.201847] env[69994]: _type = "Task" [ 803.201847] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.212197] env[69994]: DEBUG oslo_vmware.api [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925411, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.234045] env[69994]: DEBUG nova.compute.manager [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 803.242085] env[69994]: DEBUG nova.policy [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5667c67d9b6f44138d1479e901b60c74', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34469ad51e694a3389595c28ef508144', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 803.336821] env[69994]: DEBUG nova.compute.manager [req-d6849469-7bca-4cc6-bb5d-85bdb68f9c6d req-ee411335-ca52-4779-9ab3-bf922897295d service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Received event network-changed-20b9c01f-3830-45bc-82b7-4b7014586c1c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 803.337095] env[69994]: DEBUG nova.compute.manager [req-d6849469-7bca-4cc6-bb5d-85bdb68f9c6d req-ee411335-ca52-4779-9ab3-bf922897295d service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Refreshing instance network info cache due to event network-changed-20b9c01f-3830-45bc-82b7-4b7014586c1c. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 803.337322] env[69994]: DEBUG oslo_concurrency.lockutils [req-d6849469-7bca-4cc6-bb5d-85bdb68f9c6d req-ee411335-ca52-4779-9ab3-bf922897295d service nova] Acquiring lock "refresh_cache-c06a2540-e77d-48c0-967f-94e2a53c4d8f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.337532] env[69994]: DEBUG oslo_concurrency.lockutils [req-d6849469-7bca-4cc6-bb5d-85bdb68f9c6d req-ee411335-ca52-4779-9ab3-bf922897295d service nova] Acquired lock "refresh_cache-c06a2540-e77d-48c0-967f-94e2a53c4d8f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.337626] env[69994]: DEBUG nova.network.neutron [req-d6849469-7bca-4cc6-bb5d-85bdb68f9c6d req-ee411335-ca52-4779-9ab3-bf922897295d service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Refreshing network info cache for port 20b9c01f-3830-45bc-82b7-4b7014586c1c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 803.416955] env[69994]: DEBUG nova.network.neutron [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Updating instance_info_cache with network_info: [{"id": "c7ee288e-97b3-4ede-8384-13bef028a530", "address": "fa:16:3e:16:e2:c6", "network": {"id": "5d4fba88-38bf-42f6-9487-5a42bef1efde", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1093470465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e00e07d25fbb4808a115bb959bab8456", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7ee288e-97", "ovs_interfaceid": "c7ee288e-97b3-4ede-8384-13bef028a530", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.544678] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e18a55-346b-268f-b881-17ed795b9689, 'name': SearchDatastore_Task, 'duration_secs': 0.018552} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.548751] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.549173] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 9269e42b-b05c-4c88-9008-aaeda4b0248f/9269e42b-b05c-4c88-9008-aaeda4b0248f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 803.549818] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99031db1-f98b-47b4-9b30-f1d2604ecfe7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.564773] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925408, 'name': Rename_Task, 'duration_secs': 0.896894} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.566798] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 803.567913] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 803.567913] env[69994]: value = "task-2925412" [ 803.567913] env[69994]: _type = "Task" [ 803.567913] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.571293] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a9f2cba-fb1d-41f9-bc51-60a765c15821 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.579776] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925412, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.581398] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 803.581398] env[69994]: value = "task-2925413" [ 803.581398] env[69994]: _type = "Task" [ 803.581398] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.594851] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925413, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.657425] env[69994]: DEBUG nova.compute.manager [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Received event network-vif-plugged-c7ee288e-97b3-4ede-8384-13bef028a530 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 803.657425] env[69994]: DEBUG oslo_concurrency.lockutils [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] Acquiring lock "3c814c83-20cc-4871-9f30-5c0c7d99b8a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.657425] env[69994]: DEBUG oslo_concurrency.lockutils [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] Lock "3c814c83-20cc-4871-9f30-5c0c7d99b8a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.657425] env[69994]: DEBUG oslo_concurrency.lockutils [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] Lock "3c814c83-20cc-4871-9f30-5c0c7d99b8a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.657425] env[69994]: DEBUG nova.compute.manager [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] No waiting events found dispatching network-vif-plugged-c7ee288e-97b3-4ede-8384-13bef028a530 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 803.657425] env[69994]: WARNING 
nova.compute.manager [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Received unexpected event network-vif-plugged-c7ee288e-97b3-4ede-8384-13bef028a530 for instance with vm_state building and task_state spawning. [ 803.657425] env[69994]: DEBUG nova.compute.manager [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Received event network-changed-c7ee288e-97b3-4ede-8384-13bef028a530 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 803.657425] env[69994]: DEBUG nova.compute.manager [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Refreshing instance network info cache due to event network-changed-c7ee288e-97b3-4ede-8384-13bef028a530. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 803.657425] env[69994]: DEBUG oslo_concurrency.lockutils [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] Acquiring lock "refresh_cache-3c814c83-20cc-4871-9f30-5c0c7d99b8a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.665303] env[69994]: DEBUG nova.compute.manager [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 803.718634] env[69994]: DEBUG oslo_vmware.api [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Task: {'id': task-2925411, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.307123} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.720035] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 803.720035] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 803.720035] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 803.720035] env[69994]: INFO nova.compute.manager [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 803.720035] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 803.720035] env[69994]: DEBUG nova.compute.manager [-] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 803.720035] env[69994]: DEBUG nova.network.neutron [-] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 803.767897] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.923185] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Releasing lock "refresh_cache-3c814c83-20cc-4871-9f30-5c0c7d99b8a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.928603] env[69994]: DEBUG nova.compute.manager [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Instance network_info: |[{"id": "c7ee288e-97b3-4ede-8384-13bef028a530", "address": "fa:16:3e:16:e2:c6", "network": {"id": "5d4fba88-38bf-42f6-9487-5a42bef1efde", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1093470465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e00e07d25fbb4808a115bb959bab8456", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7ee288e-97", "ovs_interfaceid": "c7ee288e-97b3-4ede-8384-13bef028a530", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 803.928603] env[69994]: DEBUG oslo_concurrency.lockutils [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] Acquired lock "refresh_cache-3c814c83-20cc-4871-9f30-5c0c7d99b8a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.928603] env[69994]: DEBUG nova.network.neutron [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Refreshing network info cache for port c7ee288e-97b3-4ede-8384-13bef028a530 
{{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 803.930199] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:e2:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd733acc2-07d0-479e-918c-ec8a21925389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7ee288e-97b3-4ede-8384-13bef028a530', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 803.940897] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Creating folder: Project (e00e07d25fbb4808a115bb959bab8456). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 803.948491] env[69994]: DEBUG nova.network.neutron [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Successfully created port: 264f000d-41b7-4904-8621-8cd06efa69c8 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.951736] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-705bd110-b00c-494f-a42d-b86e645f631c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.968438] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Created folder: Project (e00e07d25fbb4808a115bb959bab8456) in parent group-v587342. [ 803.968438] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Creating folder: Instances. Parent ref: group-v587474. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 803.968685] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-71b8a9cf-8204-4915-89fb-0e4aeb4c8af7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.976017] env[69994]: DEBUG nova.network.neutron [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Updated VIF entry in instance network info cache for port ecb02147-aeb7-4256-9ce1-e20d727853b4. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 803.976017] env[69994]: DEBUG nova.network.neutron [req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Updating instance_info_cache with network_info: [{"id": "ecb02147-aeb7-4256-9ce1-e20d727853b4", "address": "fa:16:3e:eb:df:d3", "network": {"id": "182d9a94-1841-4933-bfdc-a6e1bcd6de39", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1233445263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "069aad9dee5a434383cecdd983f451b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecb02147-ae", "ovs_interfaceid": "ecb02147-aeb7-4256-9ce1-e20d727853b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.985128] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Created folder: Instances in parent group-v587474. [ 803.985397] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 803.989041] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 803.990029] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46918baf-13e3-4a13-b182-caad853523c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.030226] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 804.030226] env[69994]: value = "task-2925416" [ 804.030226] env[69994]: _type = "Task" [ 804.030226] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.045123] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925416, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.090112] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925412, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.110049] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925413, 'name': PowerOnVM_Task} progress is 71%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.165520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.165971] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.166299] env[69994]: DEBUG nova.network.neutron [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 804.384851] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e033fae0-31e2-4abf-8721-0695ce8ec29e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.394596] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3317e41-a50b-46ef-83ff-d122e94d2f77 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.439997] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c73e1db-aa55-4a54-8afd-bbaaf4ea8177 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.451481] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eadfa437-7ea8-472e-a874-dbc735c622cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.471531] env[69994]: DEBUG nova.compute.provider_tree [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.479585] env[69994]: DEBUG oslo_concurrency.lockutils 
[req-e1a29ccc-7eb1-45d1-82d8-b463cd61fa02 req-0936902c-cdf9-4207-a3eb-44b86855c4fb service nova] Releasing lock "refresh_cache-f1f0d79f-dc67-4cf9-816c-c451f20d65ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.524489] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.524752] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.524970] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.525163] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.525333] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.527626] env[69994]: INFO nova.compute.manager [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Terminating instance [ 804.540550] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925416, 'name': CreateVM_Task, 'duration_secs': 0.468453} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.541768] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 804.542533] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.542699] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.543032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 804.543553] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8ab4a71-d61c-4423-84c7-c82785460138 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.546431] env[69994]: DEBUG nova.network.neutron [req-d6849469-7bca-4cc6-bb5d-85bdb68f9c6d req-ee411335-ca52-4779-9ab3-bf922897295d service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Updated VIF entry in instance network info cache for port 20b9c01f-3830-45bc-82b7-4b7014586c1c. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 804.546796] env[69994]: DEBUG nova.network.neutron [req-d6849469-7bca-4cc6-bb5d-85bdb68f9c6d req-ee411335-ca52-4779-9ab3-bf922897295d service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Updating instance_info_cache with network_info: [{"id": "20b9c01f-3830-45bc-82b7-4b7014586c1c", "address": "fa:16:3e:93:ca:50", "network": {"id": "7bf48270-52c6-4ea7-9eee-06d41454c823", "bridge": "br-int", "label": "tempest-ServersTestJSON-13152800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1a14bacc0a843b2840c52111795ab8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "304be4f7-4e36-4468-9ef4-e457341cef18", "external-id": "nsx-vlan-transportzone-911", "segmentation_id": 911, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20b9c01f-38", "ovs_interfaceid": "20b9c01f-3830-45bc-82b7-4b7014586c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.552132] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Waiting for the task: (returnval){ [ 804.552132] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5279da63-6559-0070-5317-d831ecc5aad7" [ 804.552132] env[69994]: _type = "Task" [ 804.552132] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.561987] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5279da63-6559-0070-5317-d831ecc5aad7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.570343] env[69994]: DEBUG nova.network.neutron [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Updated VIF entry in instance network info cache for port c7ee288e-97b3-4ede-8384-13bef028a530. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 804.570694] env[69994]: DEBUG nova.network.neutron [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Updating instance_info_cache with network_info: [{"id": "c7ee288e-97b3-4ede-8384-13bef028a530", "address": "fa:16:3e:16:e2:c6", "network": {"id": "5d4fba88-38bf-42f6-9487-5a42bef1efde", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1093470465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e00e07d25fbb4808a115bb959bab8456", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7ee288e-97", "ovs_interfaceid": "c7ee288e-97b3-4ede-8384-13bef028a530", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.581645] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925412, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652645} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.581915] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 9269e42b-b05c-4c88-9008-aaeda4b0248f/9269e42b-b05c-4c88-9008-aaeda4b0248f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 804.582143] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 804.582398] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-24131f72-b093-42b7-bd49-cc7e840afca7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.588703] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 804.588703] env[69994]: value = "task-2925417" [ 804.588703] env[69994]: _type = "Task" [ 804.588703] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.607055] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925417, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.607337] env[69994]: DEBUG oslo_vmware.api [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925413, 'name': PowerOnVM_Task, 'duration_secs': 0.927751} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.607586] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 804.607793] env[69994]: INFO nova.compute.manager [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Took 9.40 seconds to spawn the instance on the hypervisor. 
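Editor's note: the CopyVirtualDisk_Task, ExtendVirtualDisk_Task and PowerOnVM_Task entries above all follow the same shape: a vCenter task is submitted, then the driver polls it, logging "progress is N%" until it "completed successfully" with a duration. The following is a minimal, self-contained sketch of that poll-until-done pattern only; the poll_task_state callable and its (state, progress) return shape are illustrative assumptions, not the oslo.vmware API.

    import time

    def wait_for_task(poll_task_state, interval=0.5, timeout=300.0):
        """Poll a task until it reaches a terminal state.

        poll_task_state is a hypothetical callable returning (state, progress),
        where state is one of 'queued', 'running', 'success', 'error'.
        """
        deadline = time.monotonic() + timeout
        while True:
            state, progress = poll_task_state()
            if state == 'success':
                return  # task completed successfully
            if state == 'error':
                raise RuntimeError('task failed')
            if time.monotonic() > deadline:
                raise TimeoutError('task did not complete in time')
            # Mirrors the "progress is N%" DEBUG lines emitted while waiting.
            print(f'progress is {progress}%')
            time.sleep(interval)

In the log this loop runs per task id (e.g. task-2925412, task-2925413), which is why several tasks interleave their progress lines within the same second.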
[ 804.607977] env[69994]: DEBUG nova.compute.manager [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 804.608848] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f905bdf1-9ec0-4c8f-a0bf-dc2553d04336 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.694652] env[69994]: DEBUG nova.compute.manager [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 804.719103] env[69994]: DEBUG nova.virt.hardware [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 804.719356] env[69994]: DEBUG nova.virt.hardware [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 804.719512] env[69994]: DEBUG nova.virt.hardware [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 804.719692] env[69994]: DEBUG nova.virt.hardware [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 804.719837] env[69994]: DEBUG nova.virt.hardware [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 804.719980] env[69994]: DEBUG nova.virt.hardware [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 804.720209] env[69994]: DEBUG nova.virt.hardware [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 804.720367] env[69994]: DEBUG nova.virt.hardware [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 804.720530] env[69994]: DEBUG nova.virt.hardware [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 804.720688] env[69994]: DEBUG nova.virt.hardware [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 804.720856] env[69994]: DEBUG nova.virt.hardware [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 804.721717] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac4923f-a367-4502-872e-1009dcd7949e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.730854] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e337d802-33e2-4b5e-963e-0812f9977f66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.948295] env[69994]: INFO nova.network.neutron [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Port 016f8957-6a5e-4487-b3e5-cb437366c800 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
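Editor's note: the nova.virt.hardware DEBUG lines above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerate the (sockets, cores, threads) layouts whose product equals the flavor's vCPU count within the allowed maxima. Below is a rough, self-contained sketch of that enumeration under those stated limits; the function name and structure are assumptions for illustration, not Nova's actual implementation.

    def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Enumerate (sockets, cores, threads) layouts whose product equals vcpus."""
        layouts = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    layouts.append((sockets, cores, threads))
        return layouts

    # The m1.nano flavor above has 1 vCPU, so the only layout is 1x1x1,
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_cpu_topologies(1, 65536, 65536, 65536))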
[ 804.948295] env[69994]: DEBUG nova.network.neutron [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Updating instance_info_cache with network_info: [{"id": "8b639504-b3a0-4772-9a06-af40fbe1667e", "address": "fa:16:3e:d3:a3:94", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b639504-b3", "ovs_interfaceid": "8b639504-b3a0-4772-9a06-af40fbe1667e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.977270] env[69994]: DEBUG nova.scheduler.client.report [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 805.000208] env[69994]: DEBUG nova.network.neutron [-] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.031494] env[69994]: DEBUG nova.compute.manager [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 805.031735] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 805.032645] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad64dfa-3dac-41cd-93ea-6752a9ad3fb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.040887] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 805.041163] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1e97d22-0ac5-4006-9ec5-b1ea48fce013 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.047593] env[69994]: DEBUG oslo_vmware.api [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 805.047593] env[69994]: value = "task-2925418" [ 805.047593] env[69994]: _type = "Task" [ 805.047593] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.051203] env[69994]: DEBUG oslo_concurrency.lockutils [req-d6849469-7bca-4cc6-bb5d-85bdb68f9c6d req-ee411335-ca52-4779-9ab3-bf922897295d service nova] Releasing lock "refresh_cache-c06a2540-e77d-48c0-967f-94e2a53c4d8f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.058192] env[69994]: DEBUG oslo_vmware.api [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925418, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.063373] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5279da63-6559-0070-5317-d831ecc5aad7, 'name': SearchDatastore_Task, 'duration_secs': 0.028698} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.063648] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.063869] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 805.064116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.064261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.064440] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 805.064692] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-174fe8d9-cfb0-4ed1-96dc-a430f2736f67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.077546] env[69994]: DEBUG oslo_concurrency.lockutils [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] Releasing lock "refresh_cache-3c814c83-20cc-4871-9f30-5c0c7d99b8a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.077798] env[69994]: DEBUG nova.compute.manager [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Received event network-vif-deleted-016f8957-6a5e-4487-b3e5-cb437366c800 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 805.077977] env[69994]: INFO nova.compute.manager [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Neutron deleted interface 016f8957-6a5e-4487-b3e5-cb437366c800; detaching it from the instance and deleting it from the info cache [ 805.078817] env[69994]: DEBUG nova.network.neutron 
[req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Updating instance_info_cache with network_info: [{"id": "8b639504-b3a0-4772-9a06-af40fbe1667e", "address": "fa:16:3e:d3:a3:94", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b639504-b3", "ovs_interfaceid": "8b639504-b3a0-4772-9a06-af40fbe1667e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.079360] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 805.079530] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 805.080449] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a88fc86-76fb-4abf-ae1e-1bd048828ade {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.086073] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Waiting for the task: (returnval){ [ 805.086073] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525dd351-bc1e-51a8-ec06-ffc559c74a43" [ 805.086073] env[69994]: _type = "Task" [ 805.086073] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.097384] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525dd351-bc1e-51a8-ec06-ffc559c74a43, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.101364] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925417, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078818} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.101874] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 805.103021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "ab320e59-febb-4f8f-9bc4-74227d29c752" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.103241] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.104852] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e373d0e-0542-4ca9-bfe0-1b52cb6edcb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.128706] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] 9269e42b-b05c-4c88-9008-aaeda4b0248f/9269e42b-b05c-4c88-9008-aaeda4b0248f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 805.133186] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76fb57c5-425a-4f89-b6b3-1c3f8d61cccc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.151432] env[69994]: INFO nova.compute.manager [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Took 48.99 seconds to build instance. [ 805.157885] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 805.157885] env[69994]: value = "task-2925419" [ 805.157885] env[69994]: _type = "Task" [ 805.157885] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.170208] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925419, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.445621] env[69994]: DEBUG nova.compute.manager [req-7edde259-259d-425d-88c2-44a48ff7c8de req-097ced65-6910-47fb-83d7-b6b8bba183ab service nova] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Received event network-vif-deleted-359d6f2e-895c-4d81-afd6-815307b7c4e9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 805.450045] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "refresh_cache-e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.482581] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.835s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.482925] env[69994]: DEBUG nova.compute.manager [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 805.485466] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "9ce0d8da-2366-469a-82cf-f2dcd4c7e44f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.485689] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "9ce0d8da-2366-469a-82cf-f2dcd4c7e44f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.486166] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.003s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.486383] env[69994]: DEBUG nova.objects.instance [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lazy-loading 'resources' on Instance uuid 180b4236-289c-4818-885d-c66e9e9a2ea8 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 805.502999] env[69994]: INFO nova.compute.manager [-] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Took 1.78 seconds to deallocate network for instance. [ 805.558122] env[69994]: DEBUG oslo_vmware.api [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925418, 'name': PowerOffVM_Task, 'duration_secs': 0.441897} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.558410] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 805.558576] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 805.558824] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6b1c4a4-2b73-41c6-8e39-64d48c1cbc12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.581702] env[69994]: DEBUG oslo_concurrency.lockutils [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] Acquiring lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.596505] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525dd351-bc1e-51a8-ec06-ffc559c74a43, 'name': SearchDatastore_Task, 'duration_secs': 0.045278} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.597443] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52d45417-4435-47ac-9bce-ebb7981ec078 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.602470] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Waiting for the task: (returnval){ [ 805.602470] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529031a7-7bf5-0d48-80e9-4cded5797607" [ 805.602470] env[69994]: _type = "Task" [ 805.602470] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.611576] env[69994]: DEBUG nova.compute.utils [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 805.612453] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529031a7-7bf5-0d48-80e9-4cded5797607, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.620290] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 805.620502] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 805.620682] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Deleting the datastore file [datastore1] e46b8a11-650a-4e34-bc4a-e1c1b2515e76 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 805.620933] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fee7aed5-ae2a-437d-9eb4-54892d87c0a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.628363] env[69994]: DEBUG oslo_vmware.api [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 805.628363] env[69994]: value = "task-2925421" [ 805.628363] env[69994]: _type = "Task" [ 805.628363] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.638752] env[69994]: DEBUG oslo_vmware.api [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925421, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.653718] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d5a1a4a0-a8ac-4005-a3d4-9c172a1fee0f tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.393s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.668739] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925419, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.751462] env[69994]: DEBUG nova.network.neutron [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Successfully updated port: 264f000d-41b7-4904-8621-8cd06efa69c8 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 805.759908] env[69994]: DEBUG nova.compute.manager [req-d5b63f19-730c-4f9e-af0b-dc2f05c7b47e req-58e59805-6cad-488c-9bfe-68e107a68f8f service nova] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Received event network-vif-plugged-264f000d-41b7-4904-8621-8cd06efa69c8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 805.760479] env[69994]: DEBUG oslo_concurrency.lockutils [req-d5b63f19-730c-4f9e-af0b-dc2f05c7b47e req-58e59805-6cad-488c-9bfe-68e107a68f8f service nova] Acquiring lock "956306bc-4701-4c04-8221-8ec0b9df73ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.760479] env[69994]: DEBUG oslo_concurrency.lockutils [req-d5b63f19-730c-4f9e-af0b-dc2f05c7b47e req-58e59805-6cad-488c-9bfe-68e107a68f8f service nova] Lock "956306bc-4701-4c04-8221-8ec0b9df73ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.760776] env[69994]: DEBUG oslo_concurrency.lockutils [req-d5b63f19-730c-4f9e-af0b-dc2f05c7b47e req-58e59805-6cad-488c-9bfe-68e107a68f8f service nova] Lock "956306bc-4701-4c04-8221-8ec0b9df73ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.761186] env[69994]: DEBUG nova.compute.manager [req-d5b63f19-730c-4f9e-af0b-dc2f05c7b47e req-58e59805-6cad-488c-9bfe-68e107a68f8f service nova] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] No waiting events found dispatching network-vif-plugged-264f000d-41b7-4904-8621-8cd06efa69c8 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 805.761249] env[69994]: WARNING nova.compute.manager [req-d5b63f19-730c-4f9e-af0b-dc2f05c7b47e req-58e59805-6cad-488c-9bfe-68e107a68f8f service nova] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Received unexpected event network-vif-plugged-264f000d-41b7-4904-8621-8cd06efa69c8 for instance with vm_state building and task_state spawning. 
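Editorial note: the repeated "Waiting for the task ... progress is N%" entries (task-2925418, task-2925419, the SearchDatastore session tasks) come from oslo.vmware's task-polling loop in api.py. Below is a rough, hypothetical sketch of that poll-until-complete pattern under assumed names; it is not oslo.vmware's real implementation, and get_task_info is an assumed caller-supplied helper rather than a library API.

```python
# Hypothetical sketch of the "wait for vCenter task" pattern seen above.
# NOT oslo.vmware code; get_task_info() is an assumed helper that returns a
# dict such as {'state': 'running', 'progress': 14}.
import time


def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
    """Poll a vCenter-style task until it reports success or error."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        time.sleep(interval)  # back off between polls, as _poll_task does
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")
```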
[ 805.953473] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a92120c-d9ab-47df-a689-ea7a47a6681c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-e46b8a11-650a-4e34-bc4a-e1c1b2515e76-016f8957-6a5e-4487-b3e5-cb437366c800" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.925s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.988991] env[69994]: DEBUG nova.compute.utils [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 805.992925] env[69994]: DEBUG nova.compute.manager [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 805.993036] env[69994]: DEBUG nova.network.neutron [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 806.009428] env[69994]: DEBUG oslo_concurrency.lockutils [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.035450] env[69994]: DEBUG nova.policy [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab615186df9346cc97536b09f6833520', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0aac564fe3e0434dbb936da74cb0b1d9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 806.117242] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.014s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.117836] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529031a7-7bf5-0d48-80e9-4cded5797607, 'name': SearchDatastore_Task, 
'duration_secs': 0.01048} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.118208] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.118503] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 3c814c83-20cc-4871-9f30-5c0c7d99b8a1/3c814c83-20cc-4871-9f30-5c0c7d99b8a1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 806.119422] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0712dab5-fcca-48a5-8d1d-b3521a2e3896 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.126522] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Waiting for the task: (returnval){ [ 806.126522] env[69994]: value = "task-2925422" [ 806.126522] env[69994]: _type = "Task" [ 806.126522] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.140926] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925422, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.144110] env[69994]: DEBUG oslo_vmware.api [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35501} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.146798] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 806.147020] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 806.147208] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 806.147417] env[69994]: INFO nova.compute.manager [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Took 1.12 seconds to destroy the instance on the hypervisor. [ 806.147619] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 806.148022] env[69994]: DEBUG nova.compute.manager [-] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 806.148125] env[69994]: DEBUG nova.network.neutron [-] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 806.156181] env[69994]: DEBUG nova.compute.manager [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 806.169833] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925419, 'name': ReconfigVM_Task, 'duration_secs': 0.618475} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.170118] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Reconfigured VM instance instance-0000002c to attach disk [datastore2] 9269e42b-b05c-4c88-9008-aaeda4b0248f/9269e42b-b05c-4c88-9008-aaeda4b0248f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 806.170775] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8a4ffd0-a3d5-4fb2-8527-318804148126 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.179399] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 806.179399] env[69994]: value = "task-2925423" [ 806.179399] env[69994]: _type = "Task" [ 806.179399] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.193056] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925423, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.253495] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "refresh_cache-956306bc-4701-4c04-8221-8ec0b9df73ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.253640] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "refresh_cache-956306bc-4701-4c04-8221-8ec0b9df73ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.253884] env[69994]: DEBUG nova.network.neutron [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 806.348018] env[69994]: DEBUG nova.network.neutron [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Successfully created port: bd1e50cf-3e19-4962-b159-76798af793d4 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 806.498669] env[69994]: DEBUG nova.compute.manager [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Start building block device 
mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 806.515390] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbac8673-49e6-4413-8cf3-b0e80d0cd946 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.524223] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13fe1c5-ee2e-474c-87ab-515ce650b2c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.562872] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e6a901-cc1b-4f6d-ad0c-d1af50bb1234 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.576591] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a92426b-77b5-4561-8576-895bd07ef47e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.589816] env[69994]: DEBUG nova.compute.provider_tree [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.641769] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925422, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.679568] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.691330] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925423, 'name': Rename_Task, 'duration_secs': 0.190497} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.692749] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 806.695779] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a039f554-550a-4405-a958-16ae9bbb8b60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.703536] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 806.703536] env[69994]: value = "task-2925424" [ 806.703536] env[69994]: _type = "Task" [ 806.703536] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.712346] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925424, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.801922] env[69994]: DEBUG nova.network.neutron [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.007125] env[69994]: INFO nova.virt.block_device [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Booting with volume cd17a3cc-4285-4a72-b443-b0f9d28d0473 at /dev/sda [ 807.009383] env[69994]: DEBUG nova.network.neutron [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Updating instance_info_cache with network_info: [{"id": "264f000d-41b7-4904-8621-8cd06efa69c8", "address": "fa:16:3e:d7:4b:a5", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap264f000d-41", "ovs_interfaceid": "264f000d-41b7-4904-8621-8cd06efa69c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.048314] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1333b629-e613-4be9-b8dc-db2949102a15 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.057975] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ce7305-3d14-4177-9ff9-2fadeff15690 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.100838] env[69994]: DEBUG nova.scheduler.client.report [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 807.105755] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5fbf34dd-24d6-4dcf-b589-e6164d4a12e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.116273] env[69994]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe2e92f-0fac-4def-9b1f-eaca349b8aff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.141334] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925422, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.613916} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.155522] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 3c814c83-20cc-4871-9f30-5c0c7d99b8a1/3c814c83-20cc-4871-9f30-5c0c7d99b8a1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 807.155817] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 807.156613] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a10b151-2ad6-46a3-99d9-32dc201ba87b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.159338] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e178a4-eb09-4e35-b305-af5a123f5b91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.168320] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6f4750-472b-4266-8623-255617390f3e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.171100] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Waiting for the task: (returnval){ [ 807.171100] env[69994]: value = "task-2925425" [ 807.171100] env[69994]: _type = "Task" [ 807.171100] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.179287] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925425, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.183789] env[69994]: DEBUG nova.virt.block_device [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Updating existing volume attachment record: 0b0b68f5-c691-4265-a536-eabe86757d9d {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 807.200049] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "ab320e59-febb-4f8f-9bc4-74227d29c752" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.200049] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.200049] env[69994]: INFO nova.compute.manager [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Attaching volume 36d050ad-1ac5-4518-a08c-d07445bd0225 to /dev/sdb [ 807.217778] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925424, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.244631] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a5397d-691c-4644-a6d4-4976be74fe6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.254333] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87d57b1-c4ad-4b54-bbd6-e05cadf8535a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.270417] env[69994]: DEBUG nova.virt.block_device [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Updating existing volume attachment record: 00aee61f-46cf-426b-9cfd-bb7bc149df5d {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 807.424433] env[69994]: DEBUG nova.network.neutron [-] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.486366] env[69994]: DEBUG nova.compute.manager [req-603124ee-ed18-4cf7-9a56-ce6a309ddabd req-e06e0c0d-8c6d-4e6c-ab31-7edb5b74bc7c service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Received event network-vif-deleted-8b639504-b3a0-4772-9a06-af40fbe1667e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 807.511785] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "refresh_cache-956306bc-4701-4c04-8221-8ec0b9df73ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 807.512164] env[69994]: DEBUG nova.compute.manager [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Instance network_info: |[{"id": "264f000d-41b7-4904-8621-8cd06efa69c8", "address": "fa:16:3e:d7:4b:a5", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap264f000d-41", "ovs_interfaceid": "264f000d-41b7-4904-8621-8cd06efa69c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 807.512557] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:4b:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '264f000d-41b7-4904-8621-8cd06efa69c8', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 807.520881] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 807.521708] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 807.521942] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-58493019-bd2a-4341-960e-f7ced039ad10 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.541514] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 807.541514] env[69994]: value = "task-2925427" [ 807.541514] env[69994]: _type = "Task" [ 807.541514] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.550672] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925427, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.610283] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.124s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 807.614178] env[69994]: DEBUG oslo_concurrency.lockutils [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.200s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.614433] env[69994]: DEBUG nova.objects.instance [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lazy-loading 'resources' on Instance uuid 6fb97a65-bf0b-4e79-9611-f0f3179661b5 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 807.629339] env[69994]: INFO nova.scheduler.client.report [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Deleted allocations for instance 180b4236-289c-4818-885d-c66e9e9a2ea8 [ 807.680516] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925425, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068741} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.680792] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 807.681610] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6cb7af8-cd19-4964-bb86-c8e1fc39c79f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.705166] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] 3c814c83-20cc-4871-9f30-5c0c7d99b8a1/3c814c83-20cc-4871-9f30-5c0c7d99b8a1.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 807.706086] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00f5d254-019f-4655-a8dc-0393b4d6b586 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.728897] env[69994]: DEBUG oslo_vmware.api [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925424, 'name': PowerOnVM_Task, 'duration_secs': 0.648214} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.730148] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 807.730366] env[69994]: INFO nova.compute.manager [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Took 9.81 seconds to spawn the instance on the hypervisor. [ 807.730543] env[69994]: DEBUG nova.compute.manager [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 807.730916] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Waiting for the task: (returnval){ [ 807.730916] env[69994]: value = "task-2925430" [ 807.730916] env[69994]: _type = "Task" [ 807.730916] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.731574] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d24716-164e-4a91-b297-a244a0bd91fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.751670] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925430, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.796342] env[69994]: DEBUG nova.compute.manager [req-4c4c0737-e3bc-4f6d-b1ac-aa714cc1f28a req-b434975f-4068-4589-930b-aef16a676337 service nova] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Received event network-changed-264f000d-41b7-4904-8621-8cd06efa69c8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 807.796534] env[69994]: DEBUG nova.compute.manager [req-4c4c0737-e3bc-4f6d-b1ac-aa714cc1f28a req-b434975f-4068-4589-930b-aef16a676337 service nova] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Refreshing instance network info cache due to event network-changed-264f000d-41b7-4904-8621-8cd06efa69c8. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 807.796759] env[69994]: DEBUG oslo_concurrency.lockutils [req-4c4c0737-e3bc-4f6d-b1ac-aa714cc1f28a req-b434975f-4068-4589-930b-aef16a676337 service nova] Acquiring lock "refresh_cache-956306bc-4701-4c04-8221-8ec0b9df73ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.796902] env[69994]: DEBUG oslo_concurrency.lockutils [req-4c4c0737-e3bc-4f6d-b1ac-aa714cc1f28a req-b434975f-4068-4589-930b-aef16a676337 service nova] Acquired lock "refresh_cache-956306bc-4701-4c04-8221-8ec0b9df73ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.797093] env[69994]: DEBUG nova.network.neutron [req-4c4c0737-e3bc-4f6d-b1ac-aa714cc1f28a req-b434975f-4068-4589-930b-aef16a676337 service nova] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Refreshing network info cache for port 264f000d-41b7-4904-8621-8cd06efa69c8 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 807.930306] env[69994]: INFO nova.compute.manager [-] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Took 1.78 seconds to deallocate network for instance. [ 808.001938] env[69994]: DEBUG nova.network.neutron [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Successfully updated port: bd1e50cf-3e19-4962-b159-76798af793d4 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 808.053239] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925427, 'name': CreateVM_Task, 'duration_secs': 0.360132} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.053434] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 808.054172] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.054274] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.054605] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 808.054853] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf8e91ee-02f4-4f68-a075-942d7ac205c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.059521] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 808.059521] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5298bbf4-6579-dae0-5e4d-b70bad9413d4" [ 808.059521] env[69994]: _type = "Task" [ 808.059521] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.067468] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5298bbf4-6579-dae0-5e4d-b70bad9413d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.143502] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b8a22957-9160-4dd3-ab4a-5d65db1c12e6 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "180b4236-289c-4818-885d-c66e9e9a2ea8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.138s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.248490] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925430, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.259644] env[69994]: INFO nova.compute.manager [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Took 47.36 seconds to build instance. [ 808.339806] env[69994]: DEBUG nova.compute.manager [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 808.440236] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.504887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Acquiring lock "refresh_cache-4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.505203] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Acquired lock "refresh_cache-4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.505318] env[69994]: DEBUG nova.network.neutron [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 808.573243] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5298bbf4-6579-dae0-5e4d-b70bad9413d4, 'name': SearchDatastore_Task, 'duration_secs': 0.035742} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.577016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.577770] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 808.578138] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.578366] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.578628] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 808.579760] env[69994]: DEBUG nova.network.neutron [req-4c4c0737-e3bc-4f6d-b1ac-aa714cc1f28a req-b434975f-4068-4589-930b-aef16a676337 service nova] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Updated VIF entry in instance network info cache for port 264f000d-41b7-4904-8621-8cd06efa69c8. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 808.580214] env[69994]: DEBUG nova.network.neutron [req-4c4c0737-e3bc-4f6d-b1ac-aa714cc1f28a req-b434975f-4068-4589-930b-aef16a676337 service nova] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Updating instance_info_cache with network_info: [{"id": "264f000d-41b7-4904-8621-8cd06efa69c8", "address": "fa:16:3e:d7:4b:a5", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap264f000d-41", "ovs_interfaceid": "264f000d-41b7-4904-8621-8cd06efa69c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.581935] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cab18597-c2dd-45bd-b8da-46d1b952b0b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.591948] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 808.592379] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 808.597283] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98ca2084-c953-4728-885b-72bf6fd049f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.605411] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 808.605411] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5261eb64-6cea-b799-0400-25bedbbdf2a4" [ 808.605411] env[69994]: _type = "Task" [ 808.605411] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.619666] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5261eb64-6cea-b799-0400-25bedbbdf2a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.642368] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9b5c9e-2488-476b-b7f7-5fcaeb584d57 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.650388] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73645bf6-ac4a-4054-b3a8-2be072b6b4a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.687020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "e0764e41-0810-45a1-8917-ac901f0f8321" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.687020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "e0764e41-0810-45a1-8917-ac901f0f8321" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.687020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "e0764e41-0810-45a1-8917-ac901f0f8321-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.687020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "e0764e41-0810-45a1-8917-ac901f0f8321-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.687020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "e0764e41-0810-45a1-8917-ac901f0f8321-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.687020] env[69994]: INFO nova.compute.manager [None req-cee57838-70b1-4783-adf9-50fcce04313f 
tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Terminating instance [ 808.688756] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33820424-cc0d-46a7-804c-635caba929c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.698615] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb76f2ba-84d6-4375-8826-11ac3fc4fa1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.713405] env[69994]: DEBUG nova.compute.provider_tree [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.744835] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925430, 'name': ReconfigVM_Task, 'duration_secs': 0.84713} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.745159] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Reconfigured VM instance instance-0000002d to attach disk [datastore2] 3c814c83-20cc-4871-9f30-5c0c7d99b8a1/3c814c83-20cc-4871-9f30-5c0c7d99b8a1.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 808.746667] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-939c271d-38de-4f57-8e02-da4549e5054c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.752219] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Waiting for the task: (returnval){ [ 808.752219] env[69994]: value = "task-2925431" [ 808.752219] env[69994]: _type = "Task" [ 808.752219] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.760435] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925431, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.761160] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbb41bd7-058e-4f35-9418-031b7de1fc7a tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "9269e42b-b05c-4c88-9008-aaeda4b0248f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.093s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.860295] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.049485] env[69994]: DEBUG nova.network.neutron [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.085929] env[69994]: DEBUG oslo_concurrency.lockutils [req-4c4c0737-e3bc-4f6d-b1ac-aa714cc1f28a req-b434975f-4068-4589-930b-aef16a676337 service nova] Releasing lock "refresh_cache-956306bc-4701-4c04-8221-8ec0b9df73ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.117465] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5261eb64-6cea-b799-0400-25bedbbdf2a4, 'name': SearchDatastore_Task, 'duration_secs': 0.015836} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.118486] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d2b996c-6ddc-4618-82b4-0b95c9b3a2c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.124209] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 809.124209] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ccff0b-6562-26e9-fa7b-f56225c75a0f" [ 809.124209] env[69994]: _type = "Task" [ 809.124209] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.132934] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ccff0b-6562-26e9-fa7b-f56225c75a0f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.193635] env[69994]: DEBUG nova.compute.manager [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 809.193839] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 809.194752] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558bdf1b-2139-4bf2-9115-3d61dcaafc03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.199331] env[69994]: DEBUG nova.network.neutron [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Updating instance_info_cache with network_info: [{"id": "bd1e50cf-3e19-4962-b159-76798af793d4", "address": "fa:16:3e:79:f2:7d", "network": {"id": "ae1a78e1-c3f4-419c-8812-5162e8e8a737", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1539160573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0aac564fe3e0434dbb936da74cb0b1d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd1e50cf-3e", "ovs_interfaceid": "bd1e50cf-3e19-4962-b159-76798af793d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.202550] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 809.202986] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73421663-3d70-4b62-b10e-a3a348991240 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.209303] env[69994]: DEBUG oslo_vmware.api [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: 
(returnval){ [ 809.209303] env[69994]: value = "task-2925432" [ 809.209303] env[69994]: _type = "Task" [ 809.209303] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.216167] env[69994]: DEBUG nova.scheduler.client.report [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 809.224486] env[69994]: DEBUG oslo_vmware.api [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925432, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.263356] env[69994]: DEBUG nova.compute.manager [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 809.266374] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925431, 'name': Rename_Task, 'duration_secs': 0.411681} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.266908] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 809.267213] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8bc65011-7925-4560-b302-d22dfeed85c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.273958] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Waiting for the task: (returnval){ [ 809.273958] env[69994]: value = "task-2925433" [ 809.273958] env[69994]: _type = "Task" [ 809.273958] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.283120] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925433, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.300204] env[69994]: DEBUG nova.compute.manager [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 809.300688] env[69994]: DEBUG nova.virt.hardware [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 809.300894] env[69994]: DEBUG nova.virt.hardware [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 809.301058] env[69994]: DEBUG nova.virt.hardware [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 809.301248] env[69994]: DEBUG nova.virt.hardware [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 809.301389] env[69994]: DEBUG nova.virt.hardware [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 809.301529] env[69994]: DEBUG nova.virt.hardware [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 809.301927] env[69994]: DEBUG nova.virt.hardware [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 809.301927] env[69994]: DEBUG nova.virt.hardware [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 
tempest-ServerActionsV293TestJSON-303158452-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 809.302049] env[69994]: DEBUG nova.virt.hardware [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 809.302211] env[69994]: DEBUG nova.virt.hardware [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 809.302382] env[69994]: DEBUG nova.virt.hardware [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 809.303968] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e02b4b0-6288-443b-aed7-40d9f9eac4a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.312478] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2a671d-6451-4403-beac-12e1a80debc0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.635007] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ccff0b-6562-26e9-fa7b-f56225c75a0f, 'name': SearchDatastore_Task, 'duration_secs': 0.012137} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.635339] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.635548] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 956306bc-4701-4c04-8221-8ec0b9df73ca/956306bc-4701-4c04-8221-8ec0b9df73ca.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 809.635826] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b575217-7397-44be-bf21-24570a202455 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.643127] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 809.643127] env[69994]: value = "task-2925434" [ 809.643127] env[69994]: _type = "Task" [ 809.643127] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.652500] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925434, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.704370] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Releasing lock "refresh_cache-4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.704702] env[69994]: DEBUG nova.compute.manager [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Instance network_info: |[{"id": "bd1e50cf-3e19-4962-b159-76798af793d4", "address": "fa:16:3e:79:f2:7d", "network": {"id": "ae1a78e1-c3f4-419c-8812-5162e8e8a737", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1539160573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0aac564fe3e0434dbb936da74cb0b1d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd1e50cf-3e", "ovs_interfaceid": "bd1e50cf-3e19-4962-b159-76798af793d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 809.705120] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:f2:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '84aee122-f630-43c5-9cc1-3a38d3819c82', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd1e50cf-3e19-4962-b159-76798af793d4', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 809.713305] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Creating folder: Project (0aac564fe3e0434dbb936da74cb0b1d9). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 809.713638] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0a1db39-4d57-425d-ab58-3e5acdeb2063 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.723009] env[69994]: DEBUG oslo_vmware.api [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925432, 'name': PowerOffVM_Task, 'duration_secs': 0.199044} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.723259] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 809.723426] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 809.723662] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f10d4c82-abf5-47b0-803a-f44a37dee50b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.725494] env[69994]: DEBUG oslo_concurrency.lockutils [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.111s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.728059] env[69994]: DEBUG nova.compute.manager [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 809.729443] env[69994]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 809.729559] env[69994]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69994) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 809.729985] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.611s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.731222] env[69994]: INFO nova.compute.claims [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 809.734289] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b477e355-ccf5-42e8-a351-a088fb0280af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.738302] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Folder already exists: Project (0aac564fe3e0434dbb936da74cb0b1d9). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 809.738521] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Creating folder: Instances. Parent ref: group-v587423. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 809.738989] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5cbfb35-6c34-4848-8d78-3c6545fa7acc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.749899] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Created folder: Instances in parent group-v587423. [ 809.749899] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 809.749899] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 809.749899] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96453451-1bc8-45a0-b63c-d172499395c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.766485] env[69994]: INFO nova.scheduler.client.report [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Deleted allocations for instance 6fb97a65-bf0b-4e79-9611-f0f3179661b5 [ 809.786716] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 809.786716] env[69994]: value = "task-2925439" [ 809.786716] env[69994]: _type = "Task" [ 809.786716] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.795942] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.797577] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925433, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.804562] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925439, 'name': CreateVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.807169] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 809.807552] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 809.807818] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Deleting the datastore file [datastore1] e0764e41-0810-45a1-8917-ac901f0f8321 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 809.808293] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f1e5838-afed-4a80-aa95-0d60c0f2a627 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.816058] env[69994]: DEBUG oslo_vmware.api [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 809.816058] env[69994]: value = "task-2925440" [ 809.816058] env[69994]: _type = "Task" [ 809.816058] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.824823] env[69994]: DEBUG oslo_vmware.api [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925440, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.829331] env[69994]: DEBUG nova.compute.manager [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Received event network-vif-plugged-bd1e50cf-3e19-4962-b159-76798af793d4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 809.829331] env[69994]: DEBUG oslo_concurrency.lockutils [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] Acquiring lock "4cc99b2f-2d75-4a98-ac02-6b609e0c31d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.829331] env[69994]: DEBUG oslo_concurrency.lockutils [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] Lock "4cc99b2f-2d75-4a98-ac02-6b609e0c31d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.829331] env[69994]: DEBUG oslo_concurrency.lockutils [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] Lock "4cc99b2f-2d75-4a98-ac02-6b609e0c31d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.829331] env[69994]: DEBUG nova.compute.manager [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] No waiting events found dispatching network-vif-plugged-bd1e50cf-3e19-4962-b159-76798af793d4 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 809.829489] env[69994]: WARNING nova.compute.manager [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Received unexpected event network-vif-plugged-bd1e50cf-3e19-4962-b159-76798af793d4 for instance with vm_state building and task_state spawning. [ 809.829742] env[69994]: DEBUG nova.compute.manager [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Received event network-changed-bd1e50cf-3e19-4962-b159-76798af793d4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 809.830037] env[69994]: DEBUG nova.compute.manager [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Refreshing instance network info cache due to event network-changed-bd1e50cf-3e19-4962-b159-76798af793d4. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 809.830266] env[69994]: DEBUG oslo_concurrency.lockutils [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] Acquiring lock "refresh_cache-4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.830461] env[69994]: DEBUG oslo_concurrency.lockutils [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] Acquired lock "refresh_cache-4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.830716] env[69994]: DEBUG nova.network.neutron [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Refreshing network info cache for port bd1e50cf-3e19-4962-b159-76798af793d4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 810.156238] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925434, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.249079] env[69994]: INFO nova.compute.manager [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] instance snapshotting [ 810.251876] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0bdf22-2350-4ec9-9882-f4dfdcb3e47c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.274607] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d44482a-bca4-4b95-9c80-a547258f5bc8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.283645] env[69994]: DEBUG oslo_concurrency.lockutils [None req-085deb60-22fc-4cd1-9471-aa7b492467b4 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "6fb97a65-bf0b-4e79-9611-f0f3179661b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.837s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.291535] env[69994]: DEBUG oslo_vmware.api [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925433, 'name': PowerOnVM_Task, 'duration_secs': 0.813989} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.295148] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 810.295568] env[69994]: INFO nova.compute.manager [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Took 9.66 seconds to spawn the instance on the hypervisor. [ 810.295568] env[69994]: DEBUG nova.compute.manager [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 810.296426] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d19ac4-c037-4460-ae00-65d7cb51e425 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.304863] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925439, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.326216] env[69994]: DEBUG oslo_vmware.api [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925440, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.358399} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.326461] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 810.326640] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 810.326814] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 810.327069] env[69994]: INFO nova.compute.manager [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Took 1.13 seconds to destroy the instance on the hypervisor. 
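The wait_for_task / _poll_task entries above (for example task-2925433 PowerOnVM_Task polled at 88% and later reported completed with duration_secs 0.813989) follow a simple poll-until-terminal-state pattern. Below is a minimal, self-contained Python sketch of that pattern for readers of this log; it is not the oslo.vmware implementation, and poll_fn, its returned dict, and TaskFailed are hypothetical stand-ins for the vCenter TaskInfo object the real driver inspects.

    import time


    class TaskFailed(Exception):
        """Raised when the polled task reports an error state."""


    def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
        """Poll `poll_fn` until the task reaches a terminal state.

        `poll_fn` is a hypothetical callable returning a dict such as
        {'state': 'running', 'progress': 88} or {'state': 'success'}.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_fn()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'task failed'))
            # Analogous to the "progress is NN%." lines in this log.
            print("progress is %s%%" % info.get('progress', 0))
            time.sleep(interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)

Under those assumptions, a caller would pass a closure that re-reads the task state each time, e.g. wait_for_task(lambda: check_task('task-2925439')), mirroring how each _poll_task line above corresponds to one poll of the same task id until it reports "completed successfully".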
[ 810.327316] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 810.327501] env[69994]: DEBUG nova.compute.manager [-] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 810.327597] env[69994]: DEBUG nova.network.neutron [-] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 810.655643] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925434, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539718} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.656570] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 956306bc-4701-4c04-8221-8ec0b9df73ca/956306bc-4701-4c04-8221-8ec0b9df73ca.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 810.656570] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 810.656790] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4778be3e-c475-4916-829c-3ee9362ffb3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.660466] env[69994]: DEBUG nova.network.neutron [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Updated VIF entry in instance network info cache for port bd1e50cf-3e19-4962-b159-76798af793d4. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 810.661337] env[69994]: DEBUG nova.network.neutron [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Updating instance_info_cache with network_info: [{"id": "bd1e50cf-3e19-4962-b159-76798af793d4", "address": "fa:16:3e:79:f2:7d", "network": {"id": "ae1a78e1-c3f4-419c-8812-5162e8e8a737", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1539160573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0aac564fe3e0434dbb936da74cb0b1d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd1e50cf-3e", "ovs_interfaceid": "bd1e50cf-3e19-4962-b159-76798af793d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.664817] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 810.664817] env[69994]: value = "task-2925441" [ 810.664817] env[69994]: _type = "Task" [ 810.664817] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.673052] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925441, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.793487] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 810.797428] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d39b4248-5643-41fc-a90e-26334cc0cb59 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.809158] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925439, 'name': CreateVM_Task, 'duration_secs': 0.544731} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.809488] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 810.809488] env[69994]: value = "task-2925442" [ 810.809488] env[69994]: _type = "Task" [ 810.809488] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.809672] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 810.814831] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587430', 'volume_id': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'name': 'volume-cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4cc99b2f-2d75-4a98-ac02-6b609e0c31d6', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'serial': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473'}, 'attachment_id': '0b0b68f5-c691-4265-a536-eabe86757d9d', 'device_type': None, 'mount_device': '/dev/sda', 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69994) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 810.814831] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Root volume attach. Driver type: vmdk {{(pid=69994) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 810.820506] env[69994]: INFO nova.compute.manager [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Took 49.49 seconds to build instance. [ 810.821940] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817a2fc2-956b-4ae9-9f02-16cf5fed50a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.833042] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925442, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.837517] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beca3f36-e74e-417f-9583-619e3bd20e25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.846878] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15fff51a-7ef0-4ed6-892c-8ac6370558a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.856243] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-15119b36-93ec-4917-b563-e242168f81ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.863799] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Waiting for the task: (returnval){ [ 810.863799] env[69994]: value = "task-2925443" [ 810.863799] env[69994]: _type = "Task" [ 810.863799] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.871458] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925443, 'name': RelocateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.029041] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "6aacfc4e-32b4-40d7-8240-e4449cf78925" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.029245] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "6aacfc4e-32b4-40d7-8240-e4449cf78925" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.029337] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "6aacfc4e-32b4-40d7-8240-e4449cf78925-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.029522] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "6aacfc4e-32b4-40d7-8240-e4449cf78925-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.029688] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "6aacfc4e-32b4-40d7-8240-e4449cf78925-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.034242] env[69994]: INFO nova.compute.manager [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Terminating instance [ 811.106734] env[69994]: DEBUG nova.network.neutron [-] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.166199] env[69994]: DEBUG oslo_concurrency.lockutils [req-2a0ce606-9413-4f58-abc6-0da18a14d0cb req-9ea9f6b3-08ee-4ab5-9fca-19e6f083beae service nova] Releasing lock "refresh_cache-4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 811.175429] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925441, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087217} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.179603] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 811.180763] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd979bc-8cad-4636-8a8d-b418d5e8b7b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.204962] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 956306bc-4701-4c04-8221-8ec0b9df73ca/956306bc-4701-4c04-8221-8ec0b9df73ca.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 811.210591] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdd76921-50e1-48b0-a0f0-f05468ccb36c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.227933] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 811.227933] env[69994]: value = "task-2925444" [ 811.227933] env[69994]: _type = 
"Task" [ 811.227933] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.238320] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925444, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.270631] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d210327-c6c4-43eb-bef4-7d85aa4afdcb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.281165] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3209e0bd-15c1-4811-abec-3aefa9b8bc8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.320505] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7203490-6ff8-44f1-9f56-296c20b8df6c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.330263] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b0dff5bb-c5a6-4339-a631-dc8938ed2ae5 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Lock "3c814c83-20cc-4871-9f30-5c0c7d99b8a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.042s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.330643] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925442, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.335782] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9c635d-f6d4-4050-b2bb-abe5f3c34c82 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.353791] env[69994]: DEBUG nova.compute.provider_tree [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 811.377480] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925443, 'name': RelocateVM_Task} progress is 35%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.538939] env[69994]: DEBUG nova.compute.manager [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 811.539240] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 811.540221] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dcdcafe-5b9e-4d0e-b884-ed0d4a157114 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.551141] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 811.551440] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4654eab-d883-426e-8243-209f14c35d73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.561287] env[69994]: DEBUG oslo_vmware.api [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 811.561287] env[69994]: value = "task-2925445" [ 811.561287] env[69994]: _type = "Task" [ 811.561287] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.573075] env[69994]: DEBUG oslo_vmware.api [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925445, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.609686] env[69994]: INFO nova.compute.manager [-] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Took 1.28 seconds to deallocate network for instance. [ 811.741023] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925444, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.828499] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925442, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.829911] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Volume attach. Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 811.830242] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587479', 'volume_id': '36d050ad-1ac5-4518-a08c-d07445bd0225', 'name': 'volume-36d050ad-1ac5-4518-a08c-d07445bd0225', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ab320e59-febb-4f8f-9bc4-74227d29c752', 'attached_at': '', 'detached_at': '', 'volume_id': '36d050ad-1ac5-4518-a08c-d07445bd0225', 'serial': '36d050ad-1ac5-4518-a08c-d07445bd0225'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 811.831411] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4159f662-c4eb-4740-b353-c4f2c8bd4a02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.850692] env[69994]: DEBUG nova.compute.manager [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 811.854100] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb58718-ad72-4351-bc31-7c775a3c7a73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.892606] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] volume-36d050ad-1ac5-4518-a08c-d07445bd0225/volume-36d050ad-1ac5-4518-a08c-d07445bd0225.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 811.895022] env[69994]: ERROR nova.scheduler.client.report [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [req-9f3170d1-7447-42c4-b037-72dede48061c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9f3170d1-7447-42c4-b037-72dede48061c"}]} [ 811.898719] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b713f326-256e-4c4e-9889-c7fbca4552e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.913875] env[69994]: DEBUG nova.compute.manager [req-fa52c0d1-0253-46cc-9dfa-7d67f2b19592 req-f9860755-7cd4-426a-ac1c-a3dce99aaf1a service nova] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Received event network-vif-deleted-ef5413fb-71f6-42a8-a79c-ae48503f6015 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 811.926764] env[69994]: DEBUG oslo_vmware.api [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 811.926764] env[69994]: value = "task-2925446" [ 811.926764] env[69994]: _type = "Task" [ 811.926764] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.927412] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925443, 'name': RelocateVM_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.931847] env[69994]: DEBUG nova.scheduler.client.report [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 811.944058] env[69994]: DEBUG oslo_vmware.api [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925446, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.949820] env[69994]: DEBUG nova.scheduler.client.report [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 811.950080] env[69994]: DEBUG nova.compute.provider_tree [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 811.965281] env[69994]: DEBUG nova.scheduler.client.report [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 811.989327] env[69994]: DEBUG nova.scheduler.client.report [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 812.077106] env[69994]: DEBUG oslo_vmware.api [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925445, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.118244] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.240291] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925444, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.330174] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925442, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.374324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.397798] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925443, 'name': RelocateVM_Task} progress is 62%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.444559] env[69994]: DEBUG oslo_vmware.api [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925446, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.452028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Acquiring lock "3c814c83-20cc-4871-9f30-5c0c7d99b8a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.452028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Lock "3c814c83-20cc-4871-9f30-5c0c7d99b8a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.452028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Acquiring lock "3c814c83-20cc-4871-9f30-5c0c7d99b8a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.452028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Lock "3c814c83-20cc-4871-9f30-5c0c7d99b8a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.452028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Lock "3c814c83-20cc-4871-9f30-5c0c7d99b8a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.452028] env[69994]: INFO nova.compute.manager [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Terminating instance [ 812.502272] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e533bf-8627-4e2b-9e65-13b3cf97ec6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.513683] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499323f6-6768-4d65-b495-5612d7e44a63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.556012] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c9ea20-8d01-4a76-a581-babe2a5ec406 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.568607] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3899838c-4275-4130-81e9-0be0323548dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.584840] env[69994]: DEBUG nova.compute.provider_tree [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 812.589359] env[69994]: DEBUG oslo_vmware.api [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925445, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.738630] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925444, 'name': ReconfigVM_Task, 'duration_secs': 1.039351} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.738926] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 956306bc-4701-4c04-8221-8ec0b9df73ca/956306bc-4701-4c04-8221-8ec0b9df73ca.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 812.739600] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d96e8c7-4473-49b5-9e61-45995d05e290 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.749274] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 812.749274] env[69994]: value = "task-2925447" [ 812.749274] env[69994]: _type = "Task" [ 812.749274] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.763915] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925447, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.829125] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925442, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.898944] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925443, 'name': RelocateVM_Task} progress is 73%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.939834] env[69994]: DEBUG oslo_vmware.api [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925446, 'name': ReconfigVM_Task, 'duration_secs': 0.6834} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.940549] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Reconfigured VM instance instance-00000020 to attach disk [datastore1] volume-36d050ad-1ac5-4518-a08c-d07445bd0225/volume-36d050ad-1ac5-4518-a08c-d07445bd0225.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 812.945566] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2dece3a7-d8b2-44cf-b21c-6be4a90b6404 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.956803] env[69994]: DEBUG nova.compute.manager [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 812.956983] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 812.957876] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65986424-bff1-4230-af94-2e684c6a8a34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.962575] env[69994]: DEBUG oslo_vmware.api [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 812.962575] env[69994]: value = "task-2925448" [ 812.962575] env[69994]: _type = "Task" [ 812.962575] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.968262] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 812.968938] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a49d77ce-b631-4bde-8f0d-ff91aeae3a19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.973690] env[69994]: DEBUG oslo_vmware.api [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925448, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.980320] env[69994]: DEBUG oslo_vmware.api [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Waiting for the task: (returnval){ [ 812.980320] env[69994]: value = "task-2925449" [ 812.980320] env[69994]: _type = "Task" [ 812.980320] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.989381] env[69994]: DEBUG oslo_vmware.api [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925449, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.074122] env[69994]: DEBUG oslo_vmware.api [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925445, 'name': PowerOffVM_Task, 'duration_secs': 1.077207} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.074460] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 813.074643] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 813.074888] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9db4e45c-9264-430b-a076-ee66e95e3159 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.127631] env[69994]: DEBUG nova.scheduler.client.report [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 70 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 813.127994] env[69994]: DEBUG nova.compute.provider_tree [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 70 to 71 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 813.128123] env[69994]: DEBUG nova.compute.provider_tree [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 813.260864] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925447, 'name': Rename_Task, 'duration_secs': 0.363115} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.261211] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 813.261446] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6f2c2b2-5677-475e-b0b1-48c6b26a2c3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.271700] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 813.271700] env[69994]: value = "task-2925451" [ 813.271700] env[69994]: _type = "Task" [ 813.271700] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.276408] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 813.276647] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 813.277192] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Deleting the datastore file [datastore2] 6aacfc4e-32b4-40d7-8240-e4449cf78925 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 813.277537] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b13bf46-d594-40b7-8337-f22f7ef6e089 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.284777] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925451, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.286359] env[69994]: DEBUG oslo_vmware.api [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for the task: (returnval){ [ 813.286359] env[69994]: value = "task-2925452" [ 813.286359] env[69994]: _type = "Task" [ 813.286359] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.295136] env[69994]: DEBUG oslo_vmware.api [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925452, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.329192] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925442, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.397531] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925443, 'name': RelocateVM_Task} progress is 86%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.473509] env[69994]: DEBUG oslo_vmware.api [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925448, 'name': ReconfigVM_Task, 'duration_secs': 0.183977} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.473912] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587479', 'volume_id': '36d050ad-1ac5-4518-a08c-d07445bd0225', 'name': 'volume-36d050ad-1ac5-4518-a08c-d07445bd0225', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ab320e59-febb-4f8f-9bc4-74227d29c752', 'attached_at': '', 'detached_at': '', 'volume_id': '36d050ad-1ac5-4518-a08c-d07445bd0225', 'serial': '36d050ad-1ac5-4518-a08c-d07445bd0225'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 813.490153] env[69994]: DEBUG oslo_vmware.api [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925449, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.634432] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.904s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.635085] env[69994]: DEBUG nova.compute.manager [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 813.638202] env[69994]: DEBUG oslo_concurrency.lockutils [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.221s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.639024] env[69994]: DEBUG nova.objects.instance [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lazy-loading 'resources' on Instance uuid 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 813.782456] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925451, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.796694] env[69994]: DEBUG oslo_vmware.api [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925452, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.830289] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925442, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.896896] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925443, 'name': RelocateVM_Task} progress is 97%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.990396] env[69994]: DEBUG oslo_vmware.api [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925449, 'name': PowerOffVM_Task, 'duration_secs': 0.854895} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.990670] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 813.990838] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 813.991107] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bedababa-2258-4e41-b160-9555406b6bcc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.088586] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 814.088821] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 814.089012] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Deleting the datastore file [datastore2] 3c814c83-20cc-4871-9f30-5c0c7d99b8a1 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 814.089294] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3161749b-f6df-459e-8c01-c2cfec0adcd7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.096325] env[69994]: DEBUG oslo_vmware.api [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Waiting for the task: (returnval){ [ 814.096325] env[69994]: value = "task-2925454" [ 814.096325] env[69994]: _type = "Task" [ 814.096325] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.103544] env[69994]: DEBUG oslo_vmware.api [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925454, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.143592] env[69994]: DEBUG nova.compute.utils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 814.145364] env[69994]: DEBUG nova.compute.manager [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 814.145583] env[69994]: DEBUG nova.network.neutron [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 814.194806] env[69994]: DEBUG nova.policy [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0cba98ba96246a0a9995caa28b03833', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a35626d7559c4a72a8f0e932b3d47de7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 814.283847] env[69994]: DEBUG oslo_vmware.api [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925451, 'name': PowerOnVM_Task, 'duration_secs': 0.675817} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.286420] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 814.286641] env[69994]: INFO nova.compute.manager [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Took 9.59 seconds to spawn the instance on the hypervisor. 
[ 814.286812] env[69994]: DEBUG nova.compute.manager [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 814.288309] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9c951c-2029-4a62-9798-4f30a5504174 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.305189] env[69994]: DEBUG oslo_vmware.api [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Task: {'id': task-2925452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.746022} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.305927] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 814.306155] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 814.306321] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 814.306546] env[69994]: INFO nova.compute.manager [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Took 2.77 seconds to destroy the instance on the hypervisor. [ 814.306761] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 814.306953] env[69994]: DEBUG nova.compute.manager [-] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 814.307097] env[69994]: DEBUG nova.network.neutron [-] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 814.331244] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925442, 'name': CreateSnapshot_Task, 'duration_secs': 3.431666} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.331674] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 814.332812] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050dcbab-fbde-4f67-9e40-6107721f91a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.402157] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925443, 'name': RelocateVM_Task} progress is 97%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.514084] env[69994]: DEBUG nova.objects.instance [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lazy-loading 'flavor' on Instance uuid ab320e59-febb-4f8f-9bc4-74227d29c752 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 814.606980] env[69994]: DEBUG oslo_vmware.api [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925454, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.617526] env[69994]: DEBUG nova.network.neutron [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Successfully created port: d88e5a75-d299-4552-ae33-7d3b1508aefd {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 814.636119] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e561f1-ec97-46b7-ad16-0e5f57685c00 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.644260] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13891784-c73d-493a-a75e-f109770f4cd4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.649631] env[69994]: DEBUG nova.compute.manager [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 814.683165] env[69994]: DEBUG nova.network.neutron [-] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.686627] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0e00b2-ba12-4674-93d1-19f57ba629cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.689754] env[69994]: DEBUG nova.compute.manager [req-519efe7b-b0b2-4e79-9935-190da8cbaf4f req-43b59a67-da9e-4550-9d03-0a8640c8c674 service nova] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Received event network-vif-deleted-35726e36-0b23-4204-b28b-90bc44467363 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 814.689990] env[69994]: INFO nova.compute.manager [req-519efe7b-b0b2-4e79-9935-190da8cbaf4f req-43b59a67-da9e-4550-9d03-0a8640c8c674 service nova] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Neutron deleted interface 35726e36-0b23-4204-b28b-90bc44467363; detaching it from the instance and deleting it from the info cache [ 814.690222] env[69994]: DEBUG nova.network.neutron [req-519efe7b-b0b2-4e79-9935-190da8cbaf4f req-43b59a67-da9e-4550-9d03-0a8640c8c674 service nova] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.698391] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff35ed8-7f3f-4d38-bb18-e137ba5f6c77 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.713362] env[69994]: DEBUG nova.compute.provider_tree [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be 
{{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.815873] env[69994]: INFO nova.compute.manager [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Took 44.28 seconds to build instance. [ 814.855399] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 814.856163] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-544dfb0c-81c9-439a-880b-967a03cfa542 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.865875] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 814.865875] env[69994]: value = "task-2925455" [ 814.865875] env[69994]: _type = "Task" [ 814.865875] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.877858] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925455, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.897343] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925443, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.928329] env[69994]: DEBUG oslo_concurrency.lockutils [None req-feeb469d-6ba7-484f-ae1b-0faabf7cf362 tempest-ServersAdminTestJSON-1459039412 tempest-ServersAdminTestJSON-1459039412-project-admin] Acquiring lock "refresh_cache-956306bc-4701-4c04-8221-8ec0b9df73ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.928644] env[69994]: DEBUG oslo_concurrency.lockutils [None req-feeb469d-6ba7-484f-ae1b-0faabf7cf362 tempest-ServersAdminTestJSON-1459039412 tempest-ServersAdminTestJSON-1459039412-project-admin] Acquired lock "refresh_cache-956306bc-4701-4c04-8221-8ec0b9df73ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.929017] env[69994]: DEBUG nova.network.neutron [None req-feeb469d-6ba7-484f-ae1b-0faabf7cf362 tempest-ServersAdminTestJSON-1459039412 tempest-ServersAdminTestJSON-1459039412-project-admin] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 814.984123] env[69994]: DEBUG nova.network.neutron [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Successfully created port: fa46c420-ff1f-4c8e-a035-2028f969e7c0 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 815.020924] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef6888c6-453c-4ed2-8a22-26d916f4ceab tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.820s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.112798] env[69994]: DEBUG oslo_vmware.api [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Task: {'id': task-2925454, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.568648} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.113204] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 815.113454] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 815.113638] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 815.115425] env[69994]: INFO nova.compute.manager [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Took 2.16 seconds to destroy the instance on the hypervisor. [ 815.115749] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 815.116055] env[69994]: DEBUG nova.compute.manager [-] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 815.116177] env[69994]: DEBUG nova.network.neutron [-] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 815.192770] env[69994]: INFO nova.compute.manager [-] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Took 0.89 seconds to deallocate network for instance. 
[ 815.195019] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a8fc6c6-c968-4ff8-bde5-071f57e03074 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.214645] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022e6957-1737-44a1-819c-51be66eeb643 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.233011] env[69994]: DEBUG nova.scheduler.client.report [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 815.264247] env[69994]: DEBUG nova.compute.manager [req-519efe7b-b0b2-4e79-9935-190da8cbaf4f req-43b59a67-da9e-4550-9d03-0a8640c8c674 service nova] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Detach interface failed, port_id=35726e36-0b23-4204-b28b-90bc44467363, reason: Instance 6aacfc4e-32b4-40d7-8240-e4449cf78925 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 815.318107] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bd725dfd-af4f-4081-8ac4-9b6442a46318 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "956306bc-4701-4c04-8221-8ec0b9df73ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.679s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.376364] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925455, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.398285] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925443, 'name': RelocateVM_Task, 'duration_secs': 4.404307} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.398593] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 815.398789] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587430', 'volume_id': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'name': 'volume-cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4cc99b2f-2d75-4a98-ac02-6b609e0c31d6', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'serial': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 815.399570] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b734992-015f-49e3-9bb0-a76892f36b0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.416821] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38e8acc-963c-46c1-a77d-5b01e978ae48 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.443193] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] volume-cd17a3cc-4285-4a72-b443-b0f9d28d0473/volume-cd17a3cc-4285-4a72-b443-b0f9d28d0473.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 815.444363] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbf44c64-a999-4b18-8528-9c19f54bc0b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.206962] env[69994]: DEBUG nova.compute.manager [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 816.209998] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.210249] env[69994]: DEBUG oslo_concurrency.lockutils [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.572s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.213199] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.589s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.214601] env[69994]: INFO nova.compute.claims [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.228951] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925455, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.229867] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Waiting for the task: (returnval){ [ 816.229867] env[69994]: value = "task-2925456" [ 816.229867] env[69994]: _type = "Task" [ 816.229867] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.239829] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925456, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.242153] env[69994]: INFO nova.scheduler.client.report [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Deleted allocations for instance 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6 [ 816.248545] env[69994]: DEBUG nova.virt.hardware [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 816.248804] env[69994]: DEBUG nova.virt.hardware [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.249317] env[69994]: DEBUG nova.virt.hardware [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 816.249317] env[69994]: DEBUG nova.virt.hardware [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.249317] env[69994]: DEBUG nova.virt.hardware [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 816.249508] env[69994]: DEBUG nova.virt.hardware [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 816.249642] env[69994]: DEBUG nova.virt.hardware [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 816.249804] env[69994]: DEBUG nova.virt.hardware [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 
tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 816.249968] env[69994]: DEBUG nova.virt.hardware [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 816.250655] env[69994]: DEBUG nova.virt.hardware [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 816.250655] env[69994]: DEBUG nova.virt.hardware [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 816.251245] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39672082-fea0-48c7-ac11-0d56673bb3dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.261216] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18414ae8-0e6b-44b0-8235-371df6befb88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.495769] env[69994]: DEBUG nova.network.neutron [None req-feeb469d-6ba7-484f-ae1b-0faabf7cf362 tempest-ServersAdminTestJSON-1459039412 tempest-ServersAdminTestJSON-1459039412-project-admin] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Updating instance_info_cache with network_info: [{"id": "264f000d-41b7-4904-8621-8cd06efa69c8", "address": "fa:16:3e:d7:4b:a5", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap264f000d-41", "ovs_interfaceid": "264f000d-41b7-4904-8621-8cd06efa69c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.706408] env[69994]: DEBUG nova.network.neutron [-] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 816.722160] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925455, 'name': CloneVM_Task, 'duration_secs': 1.588502} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.723938] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Created linked-clone VM from snapshot [ 816.724903] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b485830-08e4-4c1e-9185-fb36a52c1ddd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.734446] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Uploading image 6f3e4a4d-8364-4199-a4a2-ab2eae0f6c0d {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 816.745524] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925456, 'name': ReconfigVM_Task, 'duration_secs': 0.326646} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.746413] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 816.746697] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Reconfigured VM instance instance-0000002f to attach disk [datastore1] volume-cd17a3cc-4285-4a72-b443-b0f9d28d0473/volume-cd17a3cc-4285-4a72-b443-b0f9d28d0473.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 816.754096] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a274b563-35bf-4170-a33e-d8ba9b01b1c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.756068] env[69994]: DEBUG oslo_concurrency.lockutils [None req-156ac9d6-92e4-4e08-8047-bfa005372cc6 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.157s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.757227] env[69994]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5e22a9f-03d8-4336-aafd-b6df82482bd8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.779771] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 816.779771] env[69994]: value = "task-2925457" [ 816.779771] env[69994]: _type = "Task" [ 816.779771] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.782031] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Waiting for the task: (returnval){ [ 816.782031] env[69994]: value = "task-2925458" [ 816.782031] env[69994]: _type = "Task" [ 816.782031] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.793270] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925457, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.797037] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925458, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.914612] env[69994]: DEBUG nova.compute.manager [req-333eaca5-2453-4f77-af46-17f8e9d8e9ad req-991b2e41-bf5d-4447-8cf1-67d5bcefc357 service nova] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Received event network-vif-deleted-c7ee288e-97b3-4ede-8384-13bef028a530 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 816.962768] env[69994]: DEBUG nova.network.neutron [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Successfully updated port: d88e5a75-d299-4552-ae33-7d3b1508aefd {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.967619] env[69994]: DEBUG nova.compute.manager [req-ffc38d11-10ce-4659-ac01-9fd8432e3586 req-7448fc57-ecda-49f1-ab61-8d48872d4dd3 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Received event network-vif-plugged-d88e5a75-d299-4552-ae33-7d3b1508aefd {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 816.967619] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffc38d11-10ce-4659-ac01-9fd8432e3586 req-7448fc57-ecda-49f1-ab61-8d48872d4dd3 service nova] Acquiring lock "f6408fad-a6b8-4868-a192-3acd065935ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.967764] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffc38d11-10ce-4659-ac01-9fd8432e3586 req-7448fc57-ecda-49f1-ab61-8d48872d4dd3 service nova] Lock "f6408fad-a6b8-4868-a192-3acd065935ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.967857] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffc38d11-10ce-4659-ac01-9fd8432e3586 req-7448fc57-ecda-49f1-ab61-8d48872d4dd3 service nova] Lock "f6408fad-a6b8-4868-a192-3acd065935ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.968113] env[69994]: DEBUG nova.compute.manager [req-ffc38d11-10ce-4659-ac01-9fd8432e3586 req-7448fc57-ecda-49f1-ab61-8d48872d4dd3 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] No waiting events found dispatching network-vif-plugged-d88e5a75-d299-4552-ae33-7d3b1508aefd {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 816.968301] env[69994]: WARNING nova.compute.manager [req-ffc38d11-10ce-4659-ac01-9fd8432e3586 req-7448fc57-ecda-49f1-ab61-8d48872d4dd3 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Received unexpected event network-vif-plugged-d88e5a75-d299-4552-ae33-7d3b1508aefd for instance with vm_state building and task_state spawning. 
[ 817.001250] env[69994]: DEBUG oslo_concurrency.lockutils [None req-feeb469d-6ba7-484f-ae1b-0faabf7cf362 tempest-ServersAdminTestJSON-1459039412 tempest-ServersAdminTestJSON-1459039412-project-admin] Releasing lock "refresh_cache-956306bc-4701-4c04-8221-8ec0b9df73ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.001250] env[69994]: DEBUG nova.compute.manager [None req-feeb469d-6ba7-484f-ae1b-0faabf7cf362 tempest-ServersAdminTestJSON-1459039412 tempest-ServersAdminTestJSON-1459039412-project-admin] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Inject network info {{(pid=69994) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 817.001250] env[69994]: DEBUG nova.compute.manager [None req-feeb469d-6ba7-484f-ae1b-0faabf7cf362 tempest-ServersAdminTestJSON-1459039412 tempest-ServersAdminTestJSON-1459039412-project-admin] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] network_info to inject: |[{"id": "264f000d-41b7-4904-8621-8cd06efa69c8", "address": "fa:16:3e:d7:4b:a5", "network": {"id": "d06e166b-6a41-4b34-867a-f1d420734b41", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1152970649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34469ad51e694a3389595c28ef508144", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap264f000d-41", "ovs_interfaceid": "264f000d-41b7-4904-8621-8cd06efa69c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 817.004614] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-feeb469d-6ba7-484f-ae1b-0faabf7cf362 tempest-ServersAdminTestJSON-1459039412 tempest-ServersAdminTestJSON-1459039412-project-admin] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Reconfiguring VM instance to set the machine id {{(pid=69994) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 817.005416] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cc4e188-701f-4e1a-880c-2a93ca0e1fcc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.022782] env[69994]: DEBUG oslo_vmware.api [None req-feeb469d-6ba7-484f-ae1b-0faabf7cf362 tempest-ServersAdminTestJSON-1459039412 tempest-ServersAdminTestJSON-1459039412-project-admin] Waiting for the task: (returnval){ [ 817.022782] env[69994]: value = "task-2925459" [ 817.022782] env[69994]: _type = "Task" [ 817.022782] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.031872] env[69994]: DEBUG oslo_vmware.api [None req-feeb469d-6ba7-484f-ae1b-0faabf7cf362 tempest-ServersAdminTestJSON-1459039412 tempest-ServersAdminTestJSON-1459039412-project-admin] Task: {'id': task-2925459, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.208806] env[69994]: INFO nova.compute.manager [-] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Took 2.09 seconds to deallocate network for instance. [ 817.295737] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925457, 'name': Destroy_Task, 'duration_secs': 0.371361} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.296029] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Destroyed the VM [ 817.296284] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 817.296525] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a3491d82-6714-4598-b7a2-73683665e0bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.303150] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925458, 'name': ReconfigVM_Task, 'duration_secs': 0.145103} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.303150] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587430', 'volume_id': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'name': 'volume-cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4cc99b2f-2d75-4a98-ac02-6b609e0c31d6', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'serial': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 817.304339] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5dd3bee-8485-4dc9-b414-d692773691a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.306874] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 817.306874] env[69994]: value = "task-2925460" [ 817.306874] env[69994]: _type = "Task" [ 817.306874] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.311038] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Waiting for the task: (returnval){ [ 817.311038] env[69994]: value = "task-2925461" [ 817.311038] env[69994]: _type = "Task" [ 817.311038] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.318265] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925460, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.325314] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925461, 'name': Rename_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.474323] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "5acdf02b-f61c-46ff-9c36-8e86b9be7738" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.474603] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "5acdf02b-f61c-46ff-9c36-8e86b9be7738" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.534236] env[69994]: DEBUG oslo_vmware.api [None req-feeb469d-6ba7-484f-ae1b-0faabf7cf362 tempest-ServersAdminTestJSON-1459039412 tempest-ServersAdminTestJSON-1459039412-project-admin] Task: {'id': task-2925459, 'name': ReconfigVM_Task, 'duration_secs': 0.184082} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.534627] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-feeb469d-6ba7-484f-ae1b-0faabf7cf362 tempest-ServersAdminTestJSON-1459039412 tempest-ServersAdminTestJSON-1459039412-project-admin] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Reconfigured VM instance to set the machine id {{(pid=69994) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 817.708820] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cae90c2-369c-4e2d-b765-ded579020677 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.716873] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.720730] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c264a16a-2093-4124-9979-963e0b31136e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.756047] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a32e1d-9d16-4350-8b20-0cb22aa1d3bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.764321] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08cbc24c-f53a-49a6-ac92-7c2749035be2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.778786] env[69994]: DEBUG nova.compute.provider_tree [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] 
Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.818556] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925460, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.823735] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925461, 'name': Rename_Task, 'duration_secs': 0.123486} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.824098] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 817.824367] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e5bd941-131b-425a-ab62-11382cdde58f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.830632] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Waiting for the task: (returnval){ [ 817.830632] env[69994]: value = "task-2925462" [ 817.830632] env[69994]: _type = "Task" [ 817.830632] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.838501] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925462, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.976833] env[69994]: DEBUG nova.compute.manager [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 818.217653] env[69994]: INFO nova.compute.manager [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Rebuilding instance [ 818.254159] env[69994]: DEBUG nova.compute.manager [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 818.255642] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788fa363-c5ff-4726-a3b3-212008f60be8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.282536] env[69994]: DEBUG nova.scheduler.client.report [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 818.319327] env[69994]: DEBUG oslo_vmware.api [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925460, 'name': RemoveSnapshot_Task, 'duration_secs': 0.979388} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.319683] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 818.340550] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925462, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.504501] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.791856] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.579s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.792758] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 818.795753] env[69994]: DEBUG oslo_concurrency.lockutils [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.252s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.796102] env[69994]: DEBUG nova.objects.instance [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Lazy-loading 'resources' on Instance uuid 03a10403-0253-4df0-84b2-1e56f0c057fe {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 818.824510] env[69994]: WARNING nova.compute.manager [None req-e0e3df10-beac-43df-b53a-fffe560054a9 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Image not found during snapshot: nova.exception.ImageNotFound: Image 6f3e4a4d-8364-4199-a4a2-ab2eae0f6c0d could not be found. [ 818.842076] env[69994]: DEBUG oslo_vmware.api [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925462, 'name': PowerOnVM_Task, 'duration_secs': 1.009112} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.842076] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 818.842076] env[69994]: INFO nova.compute.manager [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Took 9.54 seconds to spawn the instance on the hypervisor. [ 818.842076] env[69994]: DEBUG nova.compute.manager [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 818.842625] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a86ea1-025c-4720-867b-53293d3be9bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.053676] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "9269e42b-b05c-4c88-9008-aaeda4b0248f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.053984] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "9269e42b-b05c-4c88-9008-aaeda4b0248f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.054217] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "9269e42b-b05c-4c88-9008-aaeda4b0248f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.054402] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "9269e42b-b05c-4c88-9008-aaeda4b0248f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.054569] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock 
"9269e42b-b05c-4c88-9008-aaeda4b0248f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.058120] env[69994]: DEBUG nova.compute.manager [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Received event network-changed-d88e5a75-d299-4552-ae33-7d3b1508aefd {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 819.058318] env[69994]: DEBUG nova.compute.manager [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Refreshing instance network info cache due to event network-changed-d88e5a75-d299-4552-ae33-7d3b1508aefd. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 819.058505] env[69994]: DEBUG oslo_concurrency.lockutils [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] Acquiring lock "refresh_cache-f6408fad-a6b8-4868-a192-3acd065935ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.058643] env[69994]: DEBUG oslo_concurrency.lockutils [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] Acquired lock "refresh_cache-f6408fad-a6b8-4868-a192-3acd065935ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.058797] env[69994]: DEBUG nova.network.neutron [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Refreshing network info cache for port d88e5a75-d299-4552-ae33-7d3b1508aefd {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 819.060196] env[69994]: INFO nova.compute.manager [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Terminating instance [ 819.105755] env[69994]: DEBUG nova.network.neutron [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Successfully updated port: fa46c420-ff1f-4c8e-a035-2028f969e7c0 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 819.269159] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 819.269515] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d266eeca-07b5-4985-a3a3-0a1dd01b35ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.277060] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 819.277060] env[69994]: 
value = "task-2925463" [ 819.277060] env[69994]: _type = "Task" [ 819.277060] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.284792] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925463, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.299712] env[69994]: DEBUG nova.compute.utils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 819.304388] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 819.304775] env[69994]: DEBUG nova.network.neutron [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 819.344174] env[69994]: DEBUG nova.policy [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1be5d892102470990945b2dc1678832', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5b24b45b67e4a7aade59619ba342f82', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 819.362975] env[69994]: INFO nova.compute.manager [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Took 45.63 seconds to build instance. [ 819.566499] env[69994]: DEBUG nova.compute.manager [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 819.566825] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 819.568215] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210977b8-bc3e-40fc-818a-6cf7b65c8ab8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.587657] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 819.588414] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d6c1553-e7a0-4c78-b1db-13c4dace69a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.596506] env[69994]: DEBUG oslo_vmware.api [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 819.596506] env[69994]: value = "task-2925464" [ 819.596506] env[69994]: _type = "Task" [ 819.596506] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.606386] env[69994]: DEBUG oslo_vmware.api [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925464, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.611372] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "refresh_cache-f6408fad-a6b8-4868-a192-3acd065935ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.624839] env[69994]: DEBUG nova.network.neutron [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.770863] env[69994]: DEBUG nova.network.neutron [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.788466] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925463, 'name': PowerOffVM_Task, 'duration_secs': 0.362361} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.791505] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 819.791769] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 819.792716] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2bfee2-6f9f-4202-ba36-306ef34a9855 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.800586] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 819.800869] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f59b093-8104-421b-abcd-8bb72358d0c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.804923] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 819.820219] env[69994]: DEBUG nova.network.neutron [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Successfully created port: 2d80d63c-74f9-46d4-b671-7fb818563da4 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.825538] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c4a593-f68d-4197-81fc-210265c1a57e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.835062] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f4377d-7fb5-4487-9d86-5545df33b11a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.870675] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0cf38c8b-9cb2-4e1f-88a4-c3a416afe62a tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Lock "4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.254s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.871743] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddcab245-4d47-4c82-ba27-cdafb12b03ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.879970] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70331c50-3a8a-4c44-bd47-77397ad5db4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.897580] env[69994]: DEBUG nova.compute.provider_tree [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.015504] env[69994]: DEBUG nova.compute.manager [req-708e96a8-50d4-4141-8ebb-da8ea0204062 req-96659c3b-ea1a-4880-be83-64bf9d183cb6 service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Received event network-changed-bd1e50cf-3e19-4962-b159-76798af793d4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 820.015697] env[69994]: DEBUG nova.compute.manager [req-708e96a8-50d4-4141-8ebb-da8ea0204062 req-96659c3b-ea1a-4880-be83-64bf9d183cb6 service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Refreshing instance network info cache due to event network-changed-bd1e50cf-3e19-4962-b159-76798af793d4. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 820.015910] env[69994]: DEBUG oslo_concurrency.lockutils [req-708e96a8-50d4-4141-8ebb-da8ea0204062 req-96659c3b-ea1a-4880-be83-64bf9d183cb6 service nova] Acquiring lock "refresh_cache-4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.016091] env[69994]: DEBUG oslo_concurrency.lockutils [req-708e96a8-50d4-4141-8ebb-da8ea0204062 req-96659c3b-ea1a-4880-be83-64bf9d183cb6 service nova] Acquired lock "refresh_cache-4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.016263] env[69994]: DEBUG nova.network.neutron [req-708e96a8-50d4-4141-8ebb-da8ea0204062 req-96659c3b-ea1a-4880-be83-64bf9d183cb6 service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Refreshing network info cache for port bd1e50cf-3e19-4962-b159-76798af793d4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 820.108968] env[69994]: DEBUG oslo_vmware.api [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925464, 'name': PowerOffVM_Task, 'duration_secs': 0.238845} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.109275] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 820.109445] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 820.109703] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86709789-cd7d-4b33-aa14-0d5920595c32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.217152] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 820.217152] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 820.217152] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleting the datastore file [datastore2] 298a4d59-733f-4cda-a9c2-80dc21be91ca 
{{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 820.218576] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2da30943-dbd7-451c-b199-17c34eb7334a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.220142] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 820.220338] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 820.220506] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Deleting the datastore file [datastore2] 9269e42b-b05c-4c88-9008-aaeda4b0248f {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 820.220765] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-838201ca-ddcc-4d65-b55b-2e82fae7830d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.230031] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 820.230031] env[69994]: value = "task-2925468" [ 820.230031] env[69994]: _type = "Task" [ 820.230031] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.230031] env[69994]: DEBUG oslo_vmware.api [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 820.230031] env[69994]: value = "task-2925467" [ 820.230031] env[69994]: _type = "Task" [ 820.230031] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.238578] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925468, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.241445] env[69994]: DEBUG oslo_vmware.api [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925467, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.274264] env[69994]: DEBUG oslo_concurrency.lockutils [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] Releasing lock "refresh_cache-f6408fad-a6b8-4868-a192-3acd065935ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.274536] env[69994]: DEBUG nova.compute.manager [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Received event network-vif-plugged-fa46c420-ff1f-4c8e-a035-2028f969e7c0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 820.274732] env[69994]: DEBUG oslo_concurrency.lockutils [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] Acquiring lock "f6408fad-a6b8-4868-a192-3acd065935ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.274941] env[69994]: DEBUG oslo_concurrency.lockutils [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] Lock "f6408fad-a6b8-4868-a192-3acd065935ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.275121] env[69994]: DEBUG oslo_concurrency.lockutils [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] Lock "f6408fad-a6b8-4868-a192-3acd065935ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.275287] env[69994]: DEBUG nova.compute.manager [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] No waiting events found dispatching network-vif-plugged-fa46c420-ff1f-4c8e-a035-2028f969e7c0 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 820.275450] env[69994]: WARNING nova.compute.manager [req-f013fabf-0c52-4985-9625-0ce43ca3a386 req-b6c9b41a-1ec8-41b4-a32e-09d1761be861 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Received unexpected event network-vif-plugged-fa46c420-ff1f-4c8e-a035-2028f969e7c0 for instance with vm_state building and task_state spawning. 
[ 820.275787] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquired lock "refresh_cache-f6408fad-a6b8-4868-a192-3acd065935ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.276031] env[69994]: DEBUG nova.network.neutron [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.401035] env[69994]: DEBUG nova.scheduler.client.report [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 820.744436] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925468, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157738} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.747754] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 820.747840] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 820.748610] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 820.751261] env[69994]: DEBUG oslo_vmware.api [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925467, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16563} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.752734] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 820.752734] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 820.752734] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 820.752734] env[69994]: INFO nova.compute.manager [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Took 1.19 seconds to destroy the instance on the hypervisor. [ 820.752734] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 820.752734] env[69994]: DEBUG nova.compute.manager [-] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 820.752734] env[69994]: DEBUG nova.network.neutron [-] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 820.769056] env[69994]: DEBUG nova.network.neutron [req-708e96a8-50d4-4141-8ebb-da8ea0204062 req-96659c3b-ea1a-4880-be83-64bf9d183cb6 service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Updated VIF entry in instance network info cache for port bd1e50cf-3e19-4962-b159-76798af793d4. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 820.769446] env[69994]: DEBUG nova.network.neutron [req-708e96a8-50d4-4141-8ebb-da8ea0204062 req-96659c3b-ea1a-4880-be83-64bf9d183cb6 service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Updating instance_info_cache with network_info: [{"id": "bd1e50cf-3e19-4962-b159-76798af793d4", "address": "fa:16:3e:79:f2:7d", "network": {"id": "ae1a78e1-c3f4-419c-8812-5162e8e8a737", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1539160573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0aac564fe3e0434dbb936da74cb0b1d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd1e50cf-3e", "ovs_interfaceid": "bd1e50cf-3e19-4962-b159-76798af793d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.812285] env[69994]: DEBUG nova.network.neutron [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.816078] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 820.846056] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 820.846340] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.846540] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 820.846735] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.846884] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 820.847045] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 820.847262] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 820.847420] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 820.847739] env[69994]: DEBUG nova.virt.hardware [None 
req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 820.847739] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 820.847909] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 820.848807] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0ee6d2-9bd3-41b8-98c5-249392cb32e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.859509] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff5c862-fb03-4f43-8dfd-76dea5c1efc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.906252] env[69994]: DEBUG oslo_concurrency.lockutils [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.110s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.909569] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.175s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.910157] env[69994]: INFO nova.compute.claims [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 820.937271] env[69994]: INFO nova.scheduler.client.report [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Deleted allocations for instance 03a10403-0253-4df0-84b2-1e56f0c057fe [ 821.092108] env[69994]: DEBUG nova.compute.manager [req-00cc5561-3aed-441e-98cd-a2218cf8e983 req-67248710-90cd-4213-befa-51c1d5b7d0dd service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Received event network-changed-fa46c420-ff1f-4c8e-a035-2028f969e7c0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 821.092321] env[69994]: DEBUG nova.compute.manager [req-00cc5561-3aed-441e-98cd-a2218cf8e983 req-67248710-90cd-4213-befa-51c1d5b7d0dd service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Refreshing instance network info cache due to event 
network-changed-fa46c420-ff1f-4c8e-a035-2028f969e7c0. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 821.092601] env[69994]: DEBUG oslo_concurrency.lockutils [req-00cc5561-3aed-441e-98cd-a2218cf8e983 req-67248710-90cd-4213-befa-51c1d5b7d0dd service nova] Acquiring lock "refresh_cache-f6408fad-a6b8-4868-a192-3acd065935ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.167969] env[69994]: DEBUG nova.network.neutron [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Updating instance_info_cache with network_info: [{"id": "d88e5a75-d299-4552-ae33-7d3b1508aefd", "address": "fa:16:3e:29:7b:ac", "network": {"id": "e481868b-2eb2-4057-bdbf-d6d188a5f307", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-631373651", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2928baf1-3efb-4205-a786-d9783e51f699", "external-id": "nsx-vlan-transportzone-508", "segmentation_id": 508, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd88e5a75-d2", "ovs_interfaceid": "d88e5a75-d299-4552-ae33-7d3b1508aefd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fa46c420-ff1f-4c8e-a035-2028f969e7c0", "address": "fa:16:3e:91:6f:ad", "network": {"id": "1ba51e65-3669-40b4-ad88-3c0bb777027d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1551134203", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91b0f7e5-0d1a-46e2-bf73-09656211dea2", "external-id": "nsx-vlan-transportzone-488", "segmentation_id": 488, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa46c420-ff", "ovs_interfaceid": "fa46c420-ff1f-4c8e-a035-2028f969e7c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.272237] env[69994]: DEBUG oslo_concurrency.lockutils [req-708e96a8-50d4-4141-8ebb-da8ea0204062 req-96659c3b-ea1a-4880-be83-64bf9d183cb6 service nova] Releasing lock "refresh_cache-4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.447887] env[69994]: 
DEBUG oslo_concurrency.lockutils [None req-efde961e-2090-491b-b9ac-428458a3eb84 tempest-ServerTagsTestJSON-131042071 tempest-ServerTagsTestJSON-131042071-project-member] Lock "03a10403-0253-4df0-84b2-1e56f0c057fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.337s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.492781] env[69994]: DEBUG nova.network.neutron [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Successfully updated port: 2d80d63c-74f9-46d4-b671-7fb818563da4 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 821.495746] env[69994]: DEBUG nova.network.neutron [-] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.671655] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Releasing lock "refresh_cache-f6408fad-a6b8-4868-a192-3acd065935ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.671998] env[69994]: DEBUG nova.compute.manager [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Instance network_info: |[{"id": "d88e5a75-d299-4552-ae33-7d3b1508aefd", "address": "fa:16:3e:29:7b:ac", "network": {"id": "e481868b-2eb2-4057-bdbf-d6d188a5f307", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-631373651", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2928baf1-3efb-4205-a786-d9783e51f699", "external-id": "nsx-vlan-transportzone-508", "segmentation_id": 508, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd88e5a75-d2", "ovs_interfaceid": "d88e5a75-d299-4552-ae33-7d3b1508aefd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fa46c420-ff1f-4c8e-a035-2028f969e7c0", "address": "fa:16:3e:91:6f:ad", "network": {"id": "1ba51e65-3669-40b4-ad88-3c0bb777027d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1551134203", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91b0f7e5-0d1a-46e2-bf73-09656211dea2", "external-id": "nsx-vlan-transportzone-488", "segmentation_id": 488, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa46c420-ff", "ovs_interfaceid": "fa46c420-ff1f-4c8e-a035-2028f969e7c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 821.672370] env[69994]: DEBUG oslo_concurrency.lockutils [req-00cc5561-3aed-441e-98cd-a2218cf8e983 req-67248710-90cd-4213-befa-51c1d5b7d0dd service nova] Acquired lock "refresh_cache-f6408fad-a6b8-4868-a192-3acd065935ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.672565] env[69994]: DEBUG nova.network.neutron [req-00cc5561-3aed-441e-98cd-a2218cf8e983 req-67248710-90cd-4213-befa-51c1d5b7d0dd service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Refreshing network info cache for port fa46c420-ff1f-4c8e-a035-2028f969e7c0 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 821.673822] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:7b:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2928baf1-3efb-4205-a786-d9783e51f699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd88e5a75-d299-4552-ae33-7d3b1508aefd', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:6f:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91b0f7e5-0d1a-46e2-bf73-09656211dea2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa46c420-ff1f-4c8e-a035-2028f969e7c0', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.689271] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 821.692726] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.693821] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d8f1c8e-7631-4eb7-885f-a9f652ad6363 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.717987] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.717987] env[69994]: value = "task-2925469" [ 821.717987] env[69994]: _type = "Task" [ 821.717987] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.725133] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925469, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.792253] env[69994]: DEBUG nova.virt.hardware [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 821.792534] env[69994]: DEBUG nova.virt.hardware [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.792694] env[69994]: DEBUG nova.virt.hardware [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 821.792870] env[69994]: DEBUG nova.virt.hardware [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.793031] env[69994]: DEBUG nova.virt.hardware [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 821.793195] env[69994]: DEBUG nova.virt.hardware [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 821.793404] env[69994]: DEBUG nova.virt.hardware [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 821.793562] env[69994]: DEBUG nova.virt.hardware [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 821.793726] env[69994]: DEBUG nova.virt.hardware [None 
req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 821.793888] env[69994]: DEBUG nova.virt.hardware [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 821.794109] env[69994]: DEBUG nova.virt.hardware [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 821.795047] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a589fa-bbf7-40ac-8a4b-17e4e85ba3d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.803825] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ecfbf8-d071-4923-bd7f-c7e5ab052473 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.820369] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:df:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a81f7c8f-8499-4f74-8860-fca65590ea7b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.828024] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 821.830763] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.831016] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e58d529c-28bd-47d5-b339-3d9d8976b0b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.852167] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.852167] env[69994]: value = "task-2925470" [ 821.852167] env[69994]: _type = "Task" [ 821.852167] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.860155] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925470, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.995152] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "refresh_cache-203bc0d6-c149-4c3d-9ac7-962210d6b01d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.996627] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired lock "refresh_cache-203bc0d6-c149-4c3d-9ac7-962210d6b01d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.996627] env[69994]: DEBUG nova.network.neutron [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 822.004023] env[69994]: INFO nova.compute.manager [-] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Took 1.25 seconds to deallocate network for instance. [ 822.024365] env[69994]: DEBUG nova.network.neutron [req-00cc5561-3aed-441e-98cd-a2218cf8e983 req-67248710-90cd-4213-befa-51c1d5b7d0dd service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Updated VIF entry in instance network info cache for port fa46c420-ff1f-4c8e-a035-2028f969e7c0. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 822.024365] env[69994]: DEBUG nova.network.neutron [req-00cc5561-3aed-441e-98cd-a2218cf8e983 req-67248710-90cd-4213-befa-51c1d5b7d0dd service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Updating instance_info_cache with network_info: [{"id": "d88e5a75-d299-4552-ae33-7d3b1508aefd", "address": "fa:16:3e:29:7b:ac", "network": {"id": "e481868b-2eb2-4057-bdbf-d6d188a5f307", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-631373651", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2928baf1-3efb-4205-a786-d9783e51f699", "external-id": "nsx-vlan-transportzone-508", "segmentation_id": 508, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd88e5a75-d2", "ovs_interfaceid": "d88e5a75-d299-4552-ae33-7d3b1508aefd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fa46c420-ff1f-4c8e-a035-2028f969e7c0", "address": "fa:16:3e:91:6f:ad", "network": {"id": "1ba51e65-3669-40b4-ad88-3c0bb777027d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1551134203", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.129.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91b0f7e5-0d1a-46e2-bf73-09656211dea2", "external-id": "nsx-vlan-transportzone-488", "segmentation_id": 488, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa46c420-ff", "ovs_interfaceid": "fa46c420-ff1f-4c8e-a035-2028f969e7c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.057181] env[69994]: DEBUG nova.compute.manager [req-a8dc1dd1-b63a-4cca-957f-5da44744858c req-5b8f9dd8-6058-4b26-8bc1-0e53d7b671bd service nova] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Received event network-vif-deleted-c3dea188-eaa9-40c8-ad7a-c49683af00cb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 822.228697] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925469, 'name': CreateVM_Task, 'duration_secs': 0.453455} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.228861] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 822.229952] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.230177] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.230500] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 822.230759] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de35b49b-3082-4afd-9d8d-8d586d54ac7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.238116] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 822.238116] env[69994]: value = 
"session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d9bdd4-fe1e-6eef-b39b-d8f78addb433" [ 822.238116] env[69994]: _type = "Task" [ 822.238116] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.244701] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d9bdd4-fe1e-6eef-b39b-d8f78addb433, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.364268] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925470, 'name': CreateVM_Task, 'duration_secs': 0.354276} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.364268] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 822.364268] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7c5aee-d8da-426c-aa1e-d75362db38cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.365732] env[69994]: DEBUG oslo_concurrency.lockutils [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.369991] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4a72ae-96f4-42e3-b8cf-71c50bde786a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.408333] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627a3d3b-8b88-42a1-931a-93562f3bce8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.416010] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e026c5c7-b873-485e-8434-00dc0ff828ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.430036] env[69994]: DEBUG nova.compute.provider_tree [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.511019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.527160] env[69994]: DEBUG oslo_concurrency.lockutils 
[req-00cc5561-3aed-441e-98cd-a2218cf8e983 req-67248710-90cd-4213-befa-51c1d5b7d0dd service nova] Releasing lock "refresh_cache-f6408fad-a6b8-4868-a192-3acd065935ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.531971] env[69994]: DEBUG nova.network.neutron [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.702175] env[69994]: DEBUG nova.network.neutron [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Updating instance_info_cache with network_info: [{"id": "2d80d63c-74f9-46d4-b671-7fb818563da4", "address": "fa:16:3e:e9:27:4a", "network": {"id": "d11115f9-d7da-4b5d-869d-3f5980311128", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1428850886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b24b45b67e4a7aade59619ba342f82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d80d63c-74", "ovs_interfaceid": "2d80d63c-74f9-46d4-b671-7fb818563da4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.750151] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d9bdd4-fe1e-6eef-b39b-d8f78addb433, 'name': SearchDatastore_Task, 'duration_secs': 0.033422} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.750151] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.750151] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 822.750151] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.750151] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.750151] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 822.750151] env[69994]: DEBUG oslo_concurrency.lockutils [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.750151] env[69994]: DEBUG oslo_concurrency.lockutils [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 822.750151] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df758459-a821-4e91-a98c-0251b40beb5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.750970] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a395092b-3ec5-4f13-9418-69e5313ea07c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.755714] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 
tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 822.755714] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52131bb4-eb61-d992-9ae6-d72a0c3fccc1" [ 822.755714] env[69994]: _type = "Task" [ 822.755714] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.763222] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52131bb4-eb61-d992-9ae6-d72a0c3fccc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.764257] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 822.764425] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 822.765112] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7cddef3-f7d5-4d1c-9591-1d1be0fb03e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.769647] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 822.769647] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522f7814-3017-a452-946e-3bb9d0abbbe9" [ 822.769647] env[69994]: _type = "Task" [ 822.769647] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.776987] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522f7814-3017-a452-946e-3bb9d0abbbe9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.934677] env[69994]: DEBUG nova.scheduler.client.report [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 823.119891] env[69994]: DEBUG nova.compute.manager [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Received event network-vif-plugged-2d80d63c-74f9-46d4-b671-7fb818563da4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 823.120174] env[69994]: DEBUG oslo_concurrency.lockutils [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] Acquiring lock "203bc0d6-c149-4c3d-9ac7-962210d6b01d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.120406] env[69994]: DEBUG oslo_concurrency.lockutils [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] Lock "203bc0d6-c149-4c3d-9ac7-962210d6b01d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.120557] env[69994]: DEBUG oslo_concurrency.lockutils [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] Lock "203bc0d6-c149-4c3d-9ac7-962210d6b01d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.120724] env[69994]: DEBUG nova.compute.manager [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] No waiting events found dispatching network-vif-plugged-2d80d63c-74f9-46d4-b671-7fb818563da4 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 823.120921] env[69994]: WARNING nova.compute.manager [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Received unexpected event network-vif-plugged-2d80d63c-74f9-46d4-b671-7fb818563da4 for instance with vm_state building and task_state spawning. 
[ 823.121082] env[69994]: DEBUG nova.compute.manager [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Received event network-changed-2d80d63c-74f9-46d4-b671-7fb818563da4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 823.121222] env[69994]: DEBUG nova.compute.manager [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Refreshing instance network info cache due to event network-changed-2d80d63c-74f9-46d4-b671-7fb818563da4. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 823.121426] env[69994]: DEBUG oslo_concurrency.lockutils [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] Acquiring lock "refresh_cache-203bc0d6-c149-4c3d-9ac7-962210d6b01d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.204717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Releasing lock "refresh_cache-203bc0d6-c149-4c3d-9ac7-962210d6b01d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.205085] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Instance network_info: |[{"id": "2d80d63c-74f9-46d4-b671-7fb818563da4", "address": "fa:16:3e:e9:27:4a", "network": {"id": "d11115f9-d7da-4b5d-869d-3f5980311128", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1428850886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b24b45b67e4a7aade59619ba342f82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d80d63c-74", "ovs_interfaceid": "2d80d63c-74f9-46d4-b671-7fb818563da4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 823.205364] env[69994]: DEBUG oslo_concurrency.lockutils [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] Acquired lock "refresh_cache-203bc0d6-c149-4c3d-9ac7-962210d6b01d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.205797] env[69994]: DEBUG nova.network.neutron [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Refreshing network info cache for 
port 2d80d63c-74f9-46d4-b671-7fb818563da4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.206870] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:27:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ba3bd22-c936-470e-89bd-b3a5587e87a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d80d63c-74f9-46d4-b671-7fb818563da4', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.214753] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 823.215709] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 823.215945] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc732d1d-2791-4a36-a32f-7cb2b8db4ce2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.237122] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 823.237122] env[69994]: value = "task-2925471" [ 823.237122] env[69994]: _type = "Task" [ 823.237122] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.245271] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925471, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.264944] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52131bb4-eb61-d992-9ae6-d72a0c3fccc1, 'name': SearchDatastore_Task, 'duration_secs': 0.0232} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.265217] env[69994]: DEBUG oslo_concurrency.lockutils [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.265452] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 823.265669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.278609] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522f7814-3017-a452-946e-3bb9d0abbbe9, 'name': SearchDatastore_Task, 'duration_secs': 0.040037} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.279444] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3e500ac-6d59-4e7f-9bb6-b41c913bcc10 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.284500] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 823.284500] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52366646-5f2c-90f6-56f4-29e1a7664e77" [ 823.284500] env[69994]: _type = "Task" [ 823.284500] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.294094] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52366646-5f2c-90f6-56f4-29e1a7664e77, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.440046] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.531s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.440408] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 823.443437] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.874s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.443675] env[69994]: DEBUG nova.objects.instance [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lazy-loading 'resources' on Instance uuid 00ab07b7-e7ed-4a71-b684-d5af8b1b7616 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 823.748857] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925471, 'name': CreateVM_Task, 'duration_secs': 0.450169} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.749146] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 823.749658] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.749824] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.750141] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 823.750382] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a46229e-e68f-474b-8a83-116100e1abf3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.754456] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 823.754456] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52fec250-2e96-346b-9b07-ed6e192265f1" [ 823.754456] env[69994]: _type = "Task" [ 823.754456] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.761810] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52fec250-2e96-346b-9b07-ed6e192265f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.795696] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52366646-5f2c-90f6-56f4-29e1a7664e77, 'name': SearchDatastore_Task, 'duration_secs': 0.025881} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.796025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.796307] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] f6408fad-a6b8-4868-a192-3acd065935ea/f6408fad-a6b8-4868-a192-3acd065935ea.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 823.796587] env[69994]: DEBUG oslo_concurrency.lockutils [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.796774] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 823.796986] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01db74a8-3dc1-4944-bfa0-d64ede4f8a13 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.799052] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a8c8208-5bf1-4243-9850-874da03f0613 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.805127] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 823.805127] env[69994]: value = "task-2925472" [ 823.805127] env[69994]: _type = "Task" [ 823.805127] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.811225] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 823.811332] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 823.812135] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c470dc31-2464-4717-aee3-5ae0ebf74380 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.819367] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925472, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.820559] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 823.820559] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d66458-04dd-6a61-52ae-acf28846f359" [ 823.820559] env[69994]: _type = "Task" [ 823.820559] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.828284] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d66458-04dd-6a61-52ae-acf28846f359, 'name': SearchDatastore_Task, 'duration_secs': 0.00827} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.828995] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c3f829d-a994-42f9-aa09-02360f138db8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.835495] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 823.835495] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528f7594-5cc3-97f8-4658-9888f68d53e9" [ 823.835495] env[69994]: _type = "Task" [ 823.835495] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.842480] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528f7594-5cc3-97f8-4658-9888f68d53e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.934706] env[69994]: DEBUG nova.network.neutron [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Updated VIF entry in instance network info cache for port 2d80d63c-74f9-46d4-b671-7fb818563da4. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 823.935095] env[69994]: DEBUG nova.network.neutron [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Updating instance_info_cache with network_info: [{"id": "2d80d63c-74f9-46d4-b671-7fb818563da4", "address": "fa:16:3e:e9:27:4a", "network": {"id": "d11115f9-d7da-4b5d-869d-3f5980311128", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1428850886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b24b45b67e4a7aade59619ba342f82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d80d63c-74", "ovs_interfaceid": "2d80d63c-74f9-46d4-b671-7fb818563da4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.946249] env[69994]: DEBUG nova.compute.utils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 823.950344] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 823.950746] env[69994]: DEBUG nova.network.neutron [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 823.986662] env[69994]: DEBUG nova.policy [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1be5d892102470990945b2dc1678832', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5b24b45b67e4a7aade59619ba342f82', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 824.273214] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52fec250-2e96-346b-9b07-ed6e192265f1, 'name': SearchDatastore_Task, 'duration_secs': 0.009174} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.273214] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.273214] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 824.273214] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.285260] env[69994]: DEBUG nova.network.neutron [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Successfully created port: 56bddff9-755c-462f-954d-7b8b28651134 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.316313] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925472, 'name': CopyVirtualDisk_Task, 
'duration_secs': 0.485132} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.318750] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] f6408fad-a6b8-4868-a192-3acd065935ea/f6408fad-a6b8-4868-a192-3acd065935ea.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 824.318967] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 824.319403] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eca8bc6a-1b4c-4a56-8136-4ab404b3db9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.325033] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 824.325033] env[69994]: value = "task-2925473" [ 824.325033] env[69994]: _type = "Task" [ 824.325033] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.334142] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925473, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.345256] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528f7594-5cc3-97f8-4658-9888f68d53e9, 'name': SearchDatastore_Task, 'duration_secs': 0.008085} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.347810] env[69994]: DEBUG oslo_concurrency.lockutils [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.348085] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 298a4d59-733f-4cda-a9c2-80dc21be91ca/298a4d59-733f-4cda-a9c2-80dc21be91ca.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 824.348547] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.348739] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 824.348952] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1e35f57-89e6-4726-9b13-ff0b12d94765 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.356015] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82fe4d56-b431-423f-a2d2-680bf59ab98f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.357765] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 824.357765] env[69994]: value = "task-2925474" [ 824.357765] env[69994]: _type = "Task" [ 824.357765] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.361901] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 824.362094] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 824.365321] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8402bbc-d181-47b9-aeb0-c9572f729a5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.370829] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925474, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.373699] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 824.373699] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5207e02d-12e2-1b34-7887-62788f92a1bd" [ 824.373699] env[69994]: _type = "Task" [ 824.373699] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.381406] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5207e02d-12e2-1b34-7887-62788f92a1bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.437538] env[69994]: DEBUG oslo_concurrency.lockutils [req-d52322c6-5df0-488b-815a-6a52c2c0aa8f req-55b07489-83a2-458a-8622-841af34efb17 service nova] Releasing lock "refresh_cache-203bc0d6-c149-4c3d-9ac7-962210d6b01d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.439603] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f9dd31-336a-47b1-aad0-3b21794fd1b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.446353] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87391323-e603-4f81-b102-59acc1b85b08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.450573] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 824.477823] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1602d9-d38f-4cf5-8497-08155da1adf7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.485784] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2293c4a1-3e5d-49b3-ab55-1b60c6b876a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.500053] env[69994]: DEBUG nova.compute.provider_tree [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 824.834965] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925473, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064806} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.835275] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 824.836322] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdd20ff-1d2d-4c0d-86a9-1212b000d9fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.859658] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] f6408fad-a6b8-4868-a192-3acd065935ea/f6408fad-a6b8-4868-a192-3acd065935ea.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 824.859978] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bab377d3-5b95-4b61-b845-8c862c7d98b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.885610] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925474, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516862} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.889653] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 298a4d59-733f-4cda-a9c2-80dc21be91ca/298a4d59-733f-4cda-a9c2-80dc21be91ca.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 824.889865] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 824.890123] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5207e02d-12e2-1b34-7887-62788f92a1bd, 'name': SearchDatastore_Task, 'duration_secs': 0.008467} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.890367] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 824.890367] env[69994]: value = "task-2925475" [ 824.890367] env[69994]: _type = "Task" [ 824.890367] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.890544] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-43c3b793-fe8b-4f5c-b67d-d7c55c38281a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.892639] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4567895c-14a0-4c11-95cb-7d5a8f61b754 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.902114] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925475, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.904176] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 824.904176] env[69994]: value = "task-2925476" [ 824.904176] env[69994]: _type = "Task" [ 824.904176] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.904459] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 824.904459] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d60838-aa5c-1612-2c6e-adc2442abf59" [ 824.904459] env[69994]: _type = "Task" [ 824.904459] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.916408] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d60838-aa5c-1612-2c6e-adc2442abf59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.021428] env[69994]: ERROR nova.scheduler.client.report [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [req-9ccc3d25-285b-49c3-9e6d-6e1170f30549] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9ccc3d25-285b-49c3-9e6d-6e1170f30549"}]} [ 825.038386] env[69994]: DEBUG nova.scheduler.client.report [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 825.054069] env[69994]: DEBUG nova.scheduler.client.report [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 825.054400] env[69994]: DEBUG nova.compute.provider_tree [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 825.067358] env[69994]: DEBUG nova.scheduler.client.report [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 825.088038] env[69994]: DEBUG nova.scheduler.client.report [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 825.402597] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925475, 'name': ReconfigVM_Task, 'duration_secs': 0.289393} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.405090] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Reconfigured VM instance instance-00000030 to attach disk [datastore1] f6408fad-a6b8-4868-a192-3acd065935ea/f6408fad-a6b8-4868-a192-3acd065935ea.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 825.406048] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec6a0e9e-85a3-46eb-8296-40391b5ee246 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.419225] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925476, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074858} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.423658] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 825.424054] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 825.424054] env[69994]: value = "task-2925477" [ 825.424054] env[69994]: _type = "Task" [ 825.424054] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.424314] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d60838-aa5c-1612-2c6e-adc2442abf59, 'name': SearchDatastore_Task, 'duration_secs': 0.020926} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.425801] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8355ce82-ff03-4aeb-83ff-7d0cb8d5224a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.428410] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.428692] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 203bc0d6-c149-4c3d-9ac7-962210d6b01d/203bc0d6-c149-4c3d-9ac7-962210d6b01d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 825.429456] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a71ae1a-1096-427b-8d4c-112b03ca8b54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.434808] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b044995-efc3-4e88-9744-bf99016e947c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.458670] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 298a4d59-733f-4cda-a9c2-80dc21be91ca/298a4d59-733f-4cda-a9c2-80dc21be91ca.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 825.464988] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c10d674-284c-4bdd-82c4-bbaf94fe1267 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.479278] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 825.479278] env[69994]: value = "task-2925478" [ 825.479278] env[69994]: _type = "Task" [ 825.479278] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.479504] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925477, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.480557] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6ba8f0-6545-46e7-bd89-aed1214d41c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.485189] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 825.492342] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 825.492342] env[69994]: value = "task-2925479" [ 825.492342] env[69994]: _type = "Task" [ 825.492342] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.525384] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925478, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.528512] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1061797-03bc-4e67-9951-b469e4d2e2cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.535599] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925479, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.542129] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 825.542400] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.542722] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 825.542999] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.543177] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 825.543415] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 825.543669] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 825.543901] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 825.544138] env[69994]: DEBUG 
nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 825.544321] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 825.544516] env[69994]: DEBUG nova.virt.hardware [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 825.545874] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5aa8765-9cd5-45e4-9a14-d1dafe1ba546 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.550948] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b7926d-44e2-4847-a53d-abbf171160ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.566172] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43fb896f-d144-4bd2-88f1-26dd5ba6dd25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.570552] env[69994]: DEBUG nova.compute.provider_tree [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 825.861773] env[69994]: DEBUG nova.compute.manager [req-81870e9a-66c5-4bad-9552-7d693fa6026b req-d8a955b0-bd48-4f06-8b2a-c592c53357f5 service nova] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Received event network-vif-plugged-56bddff9-755c-462f-954d-7b8b28651134 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 825.862047] env[69994]: DEBUG oslo_concurrency.lockutils [req-81870e9a-66c5-4bad-9552-7d693fa6026b req-d8a955b0-bd48-4f06-8b2a-c592c53357f5 service nova] Acquiring lock "67f5ad56-9455-43fc-b940-8a67974703cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.862295] env[69994]: DEBUG oslo_concurrency.lockutils [req-81870e9a-66c5-4bad-9552-7d693fa6026b req-d8a955b0-bd48-4f06-8b2a-c592c53357f5 service nova] Lock "67f5ad56-9455-43fc-b940-8a67974703cc-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.862469] env[69994]: DEBUG oslo_concurrency.lockutils [req-81870e9a-66c5-4bad-9552-7d693fa6026b req-d8a955b0-bd48-4f06-8b2a-c592c53357f5 service nova] Lock "67f5ad56-9455-43fc-b940-8a67974703cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.862643] env[69994]: DEBUG nova.compute.manager [req-81870e9a-66c5-4bad-9552-7d693fa6026b req-d8a955b0-bd48-4f06-8b2a-c592c53357f5 service nova] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] No waiting events found dispatching network-vif-plugged-56bddff9-755c-462f-954d-7b8b28651134 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 825.862811] env[69994]: WARNING nova.compute.manager [req-81870e9a-66c5-4bad-9552-7d693fa6026b req-d8a955b0-bd48-4f06-8b2a-c592c53357f5 service nova] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Received unexpected event network-vif-plugged-56bddff9-755c-462f-954d-7b8b28651134 for instance with vm_state building and task_state spawning. [ 825.940679] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925477, 'name': Rename_Task, 'duration_secs': 0.137399} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.940940] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 825.941194] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70ee158b-ae4a-415e-b275-895173bed869 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.947171] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 825.947171] env[69994]: value = "task-2925480" [ 825.947171] env[69994]: _type = "Task" [ 825.947171] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.947968] env[69994]: DEBUG nova.network.neutron [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Successfully updated port: 56bddff9-755c-462f-954d-7b8b28651134 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 825.962333] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925480, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.995432] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925478, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.005287] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925479, 'name': ReconfigVM_Task, 'duration_secs': 0.295499} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.005421] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 298a4d59-733f-4cda-a9c2-80dc21be91ca/298a4d59-733f-4cda-a9c2-80dc21be91ca.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 826.006105] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e709f45-2d73-463b-aa20-5d2f6c76ceba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.012755] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 826.012755] env[69994]: value = "task-2925481" [ 826.012755] env[69994]: _type = "Task" [ 826.012755] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.021339] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925481, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.117030] env[69994]: DEBUG nova.scheduler.client.report [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 826.117030] env[69994]: DEBUG nova.compute.provider_tree [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 72 to 73 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 826.117030] env[69994]: DEBUG nova.compute.provider_tree [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 826.454964] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "refresh_cache-67f5ad56-9455-43fc-b940-8a67974703cc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.455124] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired lock "refresh_cache-67f5ad56-9455-43fc-b940-8a67974703cc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.455286] env[69994]: DEBUG nova.network.neutron [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 826.460257] env[69994]: DEBUG oslo_vmware.api [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925480, 'name': PowerOnVM_Task, 'duration_secs': 0.479764} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.460777] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 826.460980] env[69994]: INFO nova.compute.manager [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Took 10.25 seconds to spawn the instance on the hypervisor. [ 826.461217] env[69994]: DEBUG nova.compute.manager [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 826.462052] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c26e33-9962-4e8c-89c8-bdaf2da11616 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.495219] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925478, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649967} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.495538] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 203bc0d6-c149-4c3d-9ac7-962210d6b01d/203bc0d6-c149-4c3d-9ac7-962210d6b01d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 826.495879] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 826.496232] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8976493e-7631-4f81-9b2f-b7c7444afcaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.505585] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 826.505585] env[69994]: value = "task-2925482" [ 826.505585] env[69994]: _type = "Task" [ 826.505585] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.514378] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925482, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.524336] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925481, 'name': Rename_Task, 'duration_secs': 0.165115} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.524633] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 826.524933] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-031fbbde-57d4-4ad2-b7e5-2fcdaec2f90d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.530525] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 826.530525] env[69994]: value = "task-2925483" [ 826.530525] env[69994]: _type = "Task" [ 826.530525] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.539202] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925483, 'name': PowerOnVM_Task} progress is 0%. 
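The recurring pattern above (Invoking SomeOperation_Task, 'Waiting for the task: (returnval){ value = "task-..." }', repeated 'progress is N%' polls, then 'completed successfully') is oslo.vmware's task handling. A minimal sketch of that pattern, assuming a hypothetical vCenter host, credentials and managed-object reference; the constructor keyword names are recalled from oslo.vmware, not taken from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Hypothetical connection details; only the invoke/poll pattern mirrors the log above.
    session = vmware_api.VMwareAPISession(
        'vc.example.test',
        'administrator@vsphere.local',
        'secret',
        api_retry_count=10,
        task_poll_interval=0.5)

    # invoke_api() issues the SOAP call ("Invoking VirtualMachine.PowerOnVM_Task")
    # and returns a task moref; wait_for_task() polls it ("progress is N%") until
    # it completes successfully or raises on error.
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')  # placeholder moref
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)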
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.622861] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.179s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.626985] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.743s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.628416] env[69994]: INFO nova.compute.claims [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.649949] env[69994]: INFO nova.scheduler.client.report [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleted allocations for instance 00ab07b7-e7ed-4a71-b684-d5af8b1b7616 [ 826.979665] env[69994]: INFO nova.compute.manager [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Took 47.88 seconds to build instance. [ 826.990890] env[69994]: DEBUG nova.network.neutron [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.019063] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925482, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075925} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.019347] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 827.020226] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221077a3-9b53-4f04-ad32-6302288e04a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.045074] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 203bc0d6-c149-4c3d-9ac7-962210d6b01d/203bc0d6-c149-4c3d-9ac7-962210d6b01d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 827.048150] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-259efbda-e8f1-4673-a076-3ccf7928de66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.067781] env[69994]: DEBUG oslo_vmware.api [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925483, 'name': PowerOnVM_Task, 'duration_secs': 0.474121} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.071045] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 827.071264] env[69994]: DEBUG nova.compute.manager [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 827.071598] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 827.071598] env[69994]: value = "task-2925484" [ 827.071598] env[69994]: _type = "Task" [ 827.071598] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.072297] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a684c24-cbdd-4c7a-8919-4c1bd75319b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.090023] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925484, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.157025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af0abf30-8ccc-448b-8cd7-0bb6bd8da5f6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "00ab07b7-e7ed-4a71-b684-d5af8b1b7616" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.517s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.187796] env[69994]: DEBUG nova.network.neutron [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Updating instance_info_cache with network_info: [{"id": "56bddff9-755c-462f-954d-7b8b28651134", "address": "fa:16:3e:4b:41:14", "network": {"id": "d11115f9-d7da-4b5d-869d-3f5980311128", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1428850886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b24b45b67e4a7aade59619ba342f82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56bddff9-75", "ovs_interfaceid": "56bddff9-755c-462f-954d-7b8b28651134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.481583] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9f364cd4-5486-4678-8ae6-59130c5ef129 tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "f6408fad-a6b8-4868-a192-3acd065935ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.364s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.584240] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925484, 'name': ReconfigVM_Task} progress is 14%. 
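The instance_info_cache entries above carry the full network_info model as JSON. A trimmed, illustrative copy of that structure, keeping only the fields read below, shows how the port ID, MAC and fixed IP sit inside it:

    # Trimmed copy of the network_info structure logged above; only the fields
    # used below are kept, values as they appear in the log.
    network_info = [{
        'id': '56bddff9-755c-462f-954d-7b8b28651134',
        'address': 'fa:16:3e:4b:41:14',
        'network': {
            'id': 'd11115f9-d7da-4b5d-869d-3f5980311128',
            'bridge': 'br-int',
            'subnets': [{
                'cidr': '192.168.128.0/28',
                'ips': [{'address': '192.168.128.5', 'type': 'fixed', 'version': 4}],
            }],
        },
    }]

    vif = network_info[0]
    print(vif['id'], vif['address'])                          # port UUID, MAC
    print(vif['network']['subnets'][0]['ips'][0]['address'])  # fixed IP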
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.594158] env[69994]: DEBUG oslo_concurrency.lockutils [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.690596] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Releasing lock "refresh_cache-67f5ad56-9455-43fc-b940-8a67974703cc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.690950] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Instance network_info: |[{"id": "56bddff9-755c-462f-954d-7b8b28651134", "address": "fa:16:3e:4b:41:14", "network": {"id": "d11115f9-d7da-4b5d-869d-3f5980311128", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1428850886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b24b45b67e4a7aade59619ba342f82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56bddff9-75", "ovs_interfaceid": "56bddff9-755c-462f-954d-7b8b28651134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 827.691365] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:41:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ba3bd22-c936-470e-89bd-b3a5587e87a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56bddff9-755c-462f-954d-7b8b28651134', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 827.699682] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 827.702666] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 827.703455] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0f72364-0d6a-482a-ba7f-af778c3e2974 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.727281] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 827.727281] env[69994]: value = "task-2925485" [ 827.727281] env[69994]: _type = "Task" [ 827.727281] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.736402] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925485, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.897800] env[69994]: DEBUG nova.compute.manager [req-78d7c99f-7038-4e46-8815-85601a0f1bbf req-63f9d4e3-709c-4b58-9f1a-4d00343d91ef service nova] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Received event network-changed-56bddff9-755c-462f-954d-7b8b28651134 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 827.897992] env[69994]: DEBUG nova.compute.manager [req-78d7c99f-7038-4e46-8815-85601a0f1bbf req-63f9d4e3-709c-4b58-9f1a-4d00343d91ef service nova] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Refreshing instance network info cache due to event network-changed-56bddff9-755c-462f-954d-7b8b28651134. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 827.898223] env[69994]: DEBUG oslo_concurrency.lockutils [req-78d7c99f-7038-4e46-8815-85601a0f1bbf req-63f9d4e3-709c-4b58-9f1a-4d00343d91ef service nova] Acquiring lock "refresh_cache-67f5ad56-9455-43fc-b940-8a67974703cc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.898365] env[69994]: DEBUG oslo_concurrency.lockutils [req-78d7c99f-7038-4e46-8815-85601a0f1bbf req-63f9d4e3-709c-4b58-9f1a-4d00343d91ef service nova] Acquired lock "refresh_cache-67f5ad56-9455-43fc-b940-8a67974703cc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.898528] env[69994]: DEBUG nova.network.neutron [req-78d7c99f-7038-4e46-8815-85601a0f1bbf req-63f9d4e3-709c-4b58-9f1a-4d00343d91ef service nova] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Refreshing network info cache for port 56bddff9-755c-462f-954d-7b8b28651134 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 828.053319] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d516c6ce-0c2f-4d2b-94e8-359e89ac023f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.062026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edfd510-a266-4b86-b496-03f19195cf14 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.097952] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96b7314-1405-4fe0-bb1c-464e8cf85afd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.105692] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925484, 'name': ReconfigVM_Task} progress is 99%. 
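The 'Acquiring lock' / 'Acquired lock' / 'Releasing lock' triplets for refresh_cache-<uuid>, and the 'Lock "compute_resources" acquired by ... ResourceTracker.instance_claim' entries, come from oslo.concurrency. A rough sketch of both usages, with hypothetical wrapper functions that only mirror the lock names seen in the log:

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid, refresh_fn):
        # One lock per instance, named "refresh_cache-<uuid>" as in the entries
        # above; held only while the cache is rebuilt.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_fn(instance_uuid)

    # Decorator form, matching the "compute_resources" lock held across claims;
    # the long "waited 24.743s" / "waited 25.371s" figures above are time spent
    # queued on this lock, not time doing work.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        ...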
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.108895] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea81028-953b-4152-8d2b-4d21a451ade4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.122902] env[69994]: DEBUG nova.compute.provider_tree [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.133070] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "f6408fad-a6b8-4868-a192-3acd065935ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.133487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "f6408fad-a6b8-4868-a192-3acd065935ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.133585] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "f6408fad-a6b8-4868-a192-3acd065935ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.133954] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "f6408fad-a6b8-4868-a192-3acd065935ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.134216] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "f6408fad-a6b8-4868-a192-3acd065935ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.136422] env[69994]: INFO nova.compute.manager [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Terminating instance [ 828.241906] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925485, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.605636] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925484, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.628442] env[69994]: DEBUG nova.scheduler.client.report [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 828.630628] env[69994]: DEBUG nova.network.neutron [req-78d7c99f-7038-4e46-8815-85601a0f1bbf req-63f9d4e3-709c-4b58-9f1a-4d00343d91ef service nova] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Updated VIF entry in instance network info cache for port 56bddff9-755c-462f-954d-7b8b28651134. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 828.630998] env[69994]: DEBUG nova.network.neutron [req-78d7c99f-7038-4e46-8815-85601a0f1bbf req-63f9d4e3-709c-4b58-9f1a-4d00343d91ef service nova] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Updating instance_info_cache with network_info: [{"id": "56bddff9-755c-462f-954d-7b8b28651134", "address": "fa:16:3e:4b:41:14", "network": {"id": "d11115f9-d7da-4b5d-869d-3f5980311128", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1428850886-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5b24b45b67e4a7aade59619ba342f82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56bddff9-75", "ovs_interfaceid": "56bddff9-755c-462f-954d-7b8b28651134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.640202] env[69994]: DEBUG nova.compute.manager [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 828.640306] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 828.641636] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c950e3df-205b-4c21-a623-f87bfcd08305 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.651065] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 828.651333] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76b4b7ae-a28b-47c4-92f3-db4fb8b93cc1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.657487] env[69994]: DEBUG oslo_vmware.api [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 828.657487] env[69994]: value = "task-2925486" [ 828.657487] env[69994]: _type = "Task" [ 828.657487] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.665093] env[69994]: DEBUG oslo_vmware.api [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925486, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.740310] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925485, 'name': CreateVM_Task, 'duration_secs': 0.66163} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.740529] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 828.741260] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.741421] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.741771] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 828.742046] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ab5c153-169a-4602-8ee1-a12ba0b3f384 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.747433] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 828.747433] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5214627a-cb35-c323-8f48-c4463150c539" [ 828.747433] env[69994]: _type = "Task" [ 828.747433] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.755280] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5214627a-cb35-c323-8f48-c4463150c539, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.860900] env[69994]: INFO nova.compute.manager [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Rebuilding instance [ 828.904916] env[69994]: DEBUG nova.compute.manager [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 828.905803] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f38db25-21bc-4763-95e0-b112572e2852 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.105529] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925484, 'name': ReconfigVM_Task, 'duration_secs': 1.690449} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.105828] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 203bc0d6-c149-4c3d-9ac7-962210d6b01d/203bc0d6-c149-4c3d-9ac7-962210d6b01d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 829.106510] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b24c583c-bcc5-427a-85c5-2f41502fdbe9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.112555] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 829.112555] env[69994]: value = "task-2925487" [ 829.112555] env[69994]: _type = "Task" [ 829.112555] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.120524] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925487, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.133866] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.508s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.134432] env[69994]: DEBUG nova.compute.manager [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 829.137290] env[69994]: DEBUG oslo_concurrency.lockutils [req-78d7c99f-7038-4e46-8815-85601a0f1bbf req-63f9d4e3-709c-4b58-9f1a-4d00343d91ef service nova] Releasing lock "refresh_cache-67f5ad56-9455-43fc-b940-8a67974703cc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.137904] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.371s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.139453] env[69994]: INFO nova.compute.claims [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.167309] env[69994]: DEBUG oslo_vmware.api [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925486, 'name': PowerOffVM_Task, 'duration_secs': 0.207261} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.167682] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 829.167836] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 829.168172] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94b2f116-f0e1-4178-b2dd-ca64ee1b0578 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.258548] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5214627a-cb35-c323-8f48-c4463150c539, 'name': SearchDatastore_Task, 'duration_secs': 0.009052} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.259601] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.259601] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 829.259601] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.259601] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.259803] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 829.259957] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c514229-e11f-48d9-b701-b3c96b0b350a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.269045] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 829.269246] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 829.270897] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcdd2d3b-003f-4e4c-91f7-4205073dc6ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.273231] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 829.273471] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 829.273674] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Deleting the datastore file [datastore1] f6408fad-a6b8-4868-a192-3acd065935ea {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 829.274440] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b246f774-a0d6-4919-adaf-63b054176a3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.281132] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 829.281132] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f28841-a985-0d97-eedb-7dbe6c271565" [ 829.281132] env[69994]: _type = "Task" [ 829.281132] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.282463] env[69994]: DEBUG oslo_vmware.api [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for the task: (returnval){ [ 829.282463] env[69994]: value = "task-2925489" [ 829.282463] env[69994]: _type = "Task" [ 829.282463] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.293423] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f28841-a985-0d97-eedb-7dbe6c271565, 'name': SearchDatastore_Task, 'duration_secs': 0.008436} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.296839] env[69994]: DEBUG oslo_vmware.api [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925489, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.297034] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c75e49e-9b83-40d4-9302-498b21c4ba29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.302158] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 829.302158] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dbf41d-8741-39f1-c203-c7b22615b516" [ 829.302158] env[69994]: _type = "Task" [ 829.302158] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.309980] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dbf41d-8741-39f1-c203-c7b22615b516, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.623179] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925487, 'name': Rename_Task, 'duration_secs': 0.17117} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.623455] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 829.623699] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-008d0d07-3aa5-4aec-9556-71b9139c8f01 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.630827] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 829.630827] env[69994]: value = "task-2925490" [ 829.630827] env[69994]: _type = "Task" [ 829.630827] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.639011] env[69994]: DEBUG nova.compute.utils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 829.640737] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925490, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.641117] env[69994]: DEBUG nova.compute.manager [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 829.641308] env[69994]: DEBUG nova.network.neutron [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 829.694662] env[69994]: DEBUG nova.policy [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f866b55f082141239e62c9437c5db8be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f632b166593c4f6bb1d6e8b795f9e2e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 829.795646] env[69994]: DEBUG oslo_vmware.api [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Task: {'id': task-2925489, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173647} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.795997] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 829.796280] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 829.796536] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 829.796744] env[69994]: INFO nova.compute.manager [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Took 1.16 seconds to destroy the instance on the hypervisor. [ 829.797009] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 829.797231] env[69994]: DEBUG nova.compute.manager [-] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 829.797338] env[69994]: DEBUG nova.network.neutron [-] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 829.811502] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dbf41d-8741-39f1-c203-c7b22615b516, 'name': SearchDatastore_Task, 'duration_secs': 0.008466} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.812154] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.812512] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 67f5ad56-9455-43fc-b940-8a67974703cc/67f5ad56-9455-43fc-b940-8a67974703cc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 829.812848] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4111c03-faaf-47ee-9cb3-4498b4a749bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.819633] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 829.819633] env[69994]: value = "task-2925491" [ 829.819633] env[69994]: _type = "Task" [ 829.819633] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.828130] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925491, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.922027] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 829.922027] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59f06048-add6-4cdf-afc9-99ee6cc4d649 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.928747] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 829.928747] env[69994]: value = "task-2925492" [ 829.928747] env[69994]: _type = "Task" [ 829.928747] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.937984] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925492, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.144637] env[69994]: DEBUG nova.compute.manager [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 830.147902] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925490, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.335704] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925491, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.439305] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925492, 'name': PowerOffVM_Task, 'duration_secs': 0.204605} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.439628] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 830.439797] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 830.440583] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8023b0-e31a-4edf-8104-3d328fa696e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.447070] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 830.447375] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1dfbbebe-15c5-4cc3-aa95-92ac21f60871 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.498902] env[69994]: DEBUG nova.network.neutron [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Successfully created port: 9f4cb414-15f4-4fb9-9ad2-6622cded83db {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.512253] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 830.512510] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 830.512662] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleting the datastore file [datastore1] 298a4d59-733f-4cda-a9c2-80dc21be91ca {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 830.513277] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0297f8c1-4148-4e84-8dde-a7500523049d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.519586] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 
tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 830.519586] env[69994]: value = "task-2925494" [ 830.519586] env[69994]: _type = "Task" [ 830.519586] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.530025] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925494, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.646019] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925490, 'name': PowerOnVM_Task, 'duration_secs': 0.634452} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.646019] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 830.646019] env[69994]: INFO nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Took 9.83 seconds to spawn the instance on the hypervisor. 
[ 830.646019] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 830.646019] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080d5759-f3fd-4d96-a639-b0f368f371fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.647302] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c39908-7538-4916-9fa4-b697c6209622 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.659688] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dad0c0c-aef6-44ac-9ed9-5c7bf96fc5e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.692108] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e3cfd0-ce69-4e73-8ad1-c0af5a56df5c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.700726] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642bbd7f-99d6-4375-9020-052401863f94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.717682] env[69994]: DEBUG nova.compute.provider_tree [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.827431] env[69994]: DEBUG nova.compute.manager [req-2acbbab6-21b8-46c7-9821-0c549ef538b1 req-316ee8a4-507f-4acd-bc93-46016d2f5485 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Received event network-vif-deleted-d88e5a75-d299-4552-ae33-7d3b1508aefd {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 830.827649] env[69994]: INFO nova.compute.manager [req-2acbbab6-21b8-46c7-9821-0c549ef538b1 req-316ee8a4-507f-4acd-bc93-46016d2f5485 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Neutron deleted interface d88e5a75-d299-4552-ae33-7d3b1508aefd; detaching it from the instance and deleting it from the info cache [ 830.827897] env[69994]: DEBUG nova.network.neutron [req-2acbbab6-21b8-46c7-9821-0c549ef538b1 req-316ee8a4-507f-4acd-bc93-46016d2f5485 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Updating instance_info_cache with network_info: [{"id": "fa46c420-ff1f-4c8e-a035-2028f969e7c0", "address": "fa:16:3e:91:6f:ad", "network": {"id": "1ba51e65-3669-40b4-ad88-3c0bb777027d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1551134203", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a35626d7559c4a72a8f0e932b3d47de7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91b0f7e5-0d1a-46e2-bf73-09656211dea2", "external-id": "nsx-vlan-transportzone-488", "segmentation_id": 488, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa46c420-ff", "ovs_interfaceid": "fa46c420-ff1f-4c8e-a035-2028f969e7c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.834685] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925491, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570333} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.835211] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 67f5ad56-9455-43fc-b940-8a67974703cc/67f5ad56-9455-43fc-b940-8a67974703cc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 830.835543] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 830.835801] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe016e41-a89b-46ac-acb0-e46163e6f857 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.843797] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 830.843797] env[69994]: value = "task-2925495" [ 830.843797] env[69994]: _type = "Task" [ 830.843797] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.856460] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925495, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.028791] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925494, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193589} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.029065] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 831.029262] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 831.029439] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 831.107596] env[69994]: DEBUG oslo_concurrency.lockutils [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Acquiring lock "75e952e7-6761-49a4-9193-175f5d30494e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.107863] env[69994]: DEBUG oslo_concurrency.lockutils [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Lock "75e952e7-6761-49a4-9193-175f5d30494e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.108099] env[69994]: DEBUG oslo_concurrency.lockutils [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Acquiring lock "75e952e7-6761-49a4-9193-175f5d30494e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.108293] env[69994]: DEBUG oslo_concurrency.lockutils [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Lock "75e952e7-6761-49a4-9193-175f5d30494e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.108467] env[69994]: DEBUG oslo_concurrency.lockutils [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Lock "75e952e7-6761-49a4-9193-175f5d30494e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.110423] env[69994]: INFO nova.compute.manager [None 
req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Terminating instance [ 831.160135] env[69994]: DEBUG nova.compute.manager [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 831.167782] env[69994]: INFO nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Took 45.58 seconds to build instance. [ 831.184563] env[69994]: DEBUG nova.virt.hardware [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 831.184807] env[69994]: DEBUG nova.virt.hardware [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.184962] env[69994]: DEBUG nova.virt.hardware [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.185155] env[69994]: DEBUG nova.virt.hardware [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.185302] env[69994]: DEBUG nova.virt.hardware [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.185449] env[69994]: DEBUG nova.virt.hardware [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 831.185650] env[69994]: DEBUG nova.virt.hardware [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 831.185806] env[69994]: DEBUG nova.virt.hardware [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 831.185968] env[69994]: DEBUG nova.virt.hardware [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 831.186149] env[69994]: DEBUG nova.virt.hardware [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 831.186396] env[69994]: DEBUG nova.virt.hardware [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 831.187542] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559f45ed-cfbb-4304-a519-bd106dfe213b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.195915] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d4c2a1-2dc7-494e-b620-376ba42a5f08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.220800] env[69994]: DEBUG nova.scheduler.client.report [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 831.336075] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c4e3c38-6f5a-47b8-a647-b9fdf74d8c5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.345218] env[69994]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6538cf53-8638-4a62-bc00-5e18f2d50d28 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.370850] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071587} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.386097] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 831.386549] env[69994]: DEBUG nova.compute.manager [req-2acbbab6-21b8-46c7-9821-0c549ef538b1 req-316ee8a4-507f-4acd-bc93-46016d2f5485 service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Detach interface failed, port_id=d88e5a75-d299-4552-ae33-7d3b1508aefd, reason: Instance f6408fad-a6b8-4868-a192-3acd065935ea could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 831.387507] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab2f729-f23d-46ed-b848-4ac49e839b31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.410430] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 67f5ad56-9455-43fc-b940-8a67974703cc/67f5ad56-9455-43fc-b940-8a67974703cc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.410733] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d931d802-9420-49da-8a46-4e83292783d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.429279] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 831.429279] env[69994]: value = "task-2925496" [ 831.429279] env[69994]: _type = "Task" [ 831.429279] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.436438] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925496, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.460901] env[69994]: DEBUG nova.network.neutron [-] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.613849] env[69994]: DEBUG nova.compute.manager [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 831.614154] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 831.614988] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f329fa95-2f08-4814-8b3e-9a0a64f5f005 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.624390] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 831.624710] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d53a0079-957d-4830-b505-1f83baa0135c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.630729] env[69994]: DEBUG oslo_vmware.api [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Waiting for the task: (returnval){ [ 831.630729] env[69994]: value = "task-2925497" [ 831.630729] env[69994]: _type = "Task" [ 831.630729] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.639199] env[69994]: DEBUG oslo_vmware.api [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925497, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.670113] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "203bc0d6-c149-4c3d-9ac7-962210d6b01d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.694s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.726032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.726212] env[69994]: DEBUG nova.compute.manager [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 831.729218] env[69994]: DEBUG oslo_concurrency.lockutils [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.720s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.730643] env[69994]: DEBUG nova.objects.instance [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Lazy-loading 'resources' on Instance uuid 9a1343a8-11b4-4c9e-8445-931eab036a4d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 831.939459] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925496, 'name': ReconfigVM_Task, 'duration_secs': 0.279697} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.939757] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 67f5ad56-9455-43fc-b940-8a67974703cc/67f5ad56-9455-43fc-b940-8a67974703cc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 831.940378] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6999aa97-eb5f-474c-bb32-eb9aa9006e9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.947669] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 831.947669] env[69994]: value = "task-2925498" [ 831.947669] env[69994]: _type = "Task" [ 831.947669] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.957283] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925498, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.962788] env[69994]: INFO nova.compute.manager [-] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Took 2.17 seconds to deallocate network for instance. 
[ 832.069262] env[69994]: DEBUG nova.virt.hardware [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 832.069529] env[69994]: DEBUG nova.virt.hardware [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.069685] env[69994]: DEBUG nova.virt.hardware [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 832.069866] env[69994]: DEBUG nova.virt.hardware [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.070015] env[69994]: DEBUG nova.virt.hardware [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 832.070165] env[69994]: DEBUG nova.virt.hardware [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 832.070372] env[69994]: DEBUG nova.virt.hardware [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 832.070566] env[69994]: DEBUG nova.virt.hardware [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 832.070696] env[69994]: DEBUG nova.virt.hardware [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 
tempest-ServersAdminTestJSON-854095043-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 832.070859] env[69994]: DEBUG nova.virt.hardware [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 832.071041] env[69994]: DEBUG nova.virt.hardware [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 832.071913] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6797ab65-d0d1-4f16-98c0-236ae8b028f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.081192] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89b1916-d27b-4851-904e-88ab0c6fd3bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.096830] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:df:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a81f7c8f-8499-4f74-8860-fca65590ea7b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.104508] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 832.104782] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 832.104996] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3803e8c-eee4-416f-a8f6-3beb1fea1d08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.123875] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.123875] env[69994]: value = "task-2925499" [ 832.123875] env[69994]: _type = "Task" [ 832.123875] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.133722] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925499, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.141439] env[69994]: DEBUG oslo_vmware.api [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925497, 'name': PowerOffVM_Task, 'duration_secs': 0.206471} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.141720] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 832.141886] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 832.142149] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e806b5c-cdf7-4cbd-a401-74fa8d87fef2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.215294] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 832.215576] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 832.215698] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Deleting the datastore file [datastore1] 75e952e7-6761-49a4-9193-175f5d30494e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 832.215961] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a4e0092-b9d0-461e-ac66-2f1c849ebd66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.222428] env[69994]: DEBUG oslo_vmware.api [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Waiting for the task: (returnval){ [ 832.222428] env[69994]: value = "task-2925501" [ 832.222428] env[69994]: _type = "Task" [ 832.222428] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.230130] env[69994]: DEBUG oslo_vmware.api [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925501, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.232560] env[69994]: DEBUG nova.compute.utils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 832.236371] env[69994]: DEBUG nova.compute.manager [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 832.236549] env[69994]: DEBUG nova.network.neutron [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 832.280891] env[69994]: DEBUG nova.network.neutron [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Successfully updated port: 9f4cb414-15f4-4fb9-9ad2-6622cded83db {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 832.293826] env[69994]: DEBUG nova.policy [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3253706fa314bf6a8aaeb6ac4c6504f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cd7386da3414f198142cee5c6d383b0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 832.460169] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925498, 'name': Rename_Task, 'duration_secs': 0.143369} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.460444] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 832.460696] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e53e0e73-d668-4be2-b8ca-1bdabd0f7405 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.469642] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.469959] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 832.469959] env[69994]: value = "task-2925502" [ 832.469959] env[69994]: _type = "Task" [ 832.469959] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.477462] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925502, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.607412] env[69994]: DEBUG nova.network.neutron [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Successfully created port: a9985d6b-dfb2-4569-99f9-a42c283e7cd1 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 832.634093] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925499, 'name': CreateVM_Task, 'duration_secs': 0.339023} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.634360] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.635337] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.635568] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.636060] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 832.636435] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4c38bee-59e8-4568-a132-b18ff9ef561b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.641858] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2b084e-d6b8-4b97-9756-5953bdc0490d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.647247] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 832.647247] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bd142c-2de7-b987-7d58-7c482aa8278a" [ 832.647247] env[69994]: _type = "Task" [ 832.647247] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.655303] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f48ce5-3e94-4846-ad63-aace211aefda {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.665450] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bd142c-2de7-b987-7d58-7c482aa8278a, 'name': SearchDatastore_Task, 'duration_secs': 0.010202} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.666173] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.666441] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.666685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.666831] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.667018] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.667295] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9da5089-660b-47d3-97e4-f4d91979edc2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.703440] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbc5514-397d-458e-9c84-a9f67260da21 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.708649] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800ae4c7-4d28-4411-a12b-1c97060f106c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.714776] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.715061] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 832.716287] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6aeac1c5-0b8b-429b-97b0-7f2c13d2ad26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.728389] env[69994]: DEBUG nova.compute.provider_tree [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.734911] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 832.734911] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f65036-aa7e-233b-b1d7-16414db78c85" [ 832.734911] env[69994]: _type = "Task" [ 832.734911] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.741938] env[69994]: DEBUG nova.compute.utils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 832.743342] env[69994]: DEBUG oslo_vmware.api [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Task: {'id': task-2925501, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136771} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.744642] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 832.744832] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 832.745051] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 832.745253] env[69994]: INFO nova.compute.manager [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 832.745507] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 832.745971] env[69994]: DEBUG nova.compute.manager [-] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 832.746097] env[69994]: DEBUG nova.network.neutron [-] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 832.751665] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f65036-aa7e-233b-b1d7-16414db78c85, 'name': SearchDatastore_Task, 'duration_secs': 0.009639} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.752784] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3c64f25-d244-4185-9bfe-9cabfb98eef2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.759258] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 832.759258] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52eadc3a-36d3-90ea-6e90-7404a622638b" [ 832.759258] env[69994]: _type = "Task" [ 832.759258] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.770471] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52eadc3a-36d3-90ea-6e90-7404a622638b, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.784998] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "refresh_cache-bb062ddc-5281-4957-bb9d-8f5c0b0ba526" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.785455] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquired lock "refresh_cache-bb062ddc-5281-4957-bb9d-8f5c0b0ba526" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.785560] env[69994]: DEBUG nova.network.neutron [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 832.887329] env[69994]: DEBUG nova.compute.manager [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Received event network-vif-deleted-fa46c420-ff1f-4c8e-a035-2028f969e7c0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.887619] env[69994]: DEBUG nova.compute.manager [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Received event network-vif-plugged-9f4cb414-15f4-4fb9-9ad2-6622cded83db {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.887836] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] Acquiring lock "bb062ddc-5281-4957-bb9d-8f5c0b0ba526-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.888207] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] Lock "bb062ddc-5281-4957-bb9d-8f5c0b0ba526-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.888496] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] Lock "bb062ddc-5281-4957-bb9d-8f5c0b0ba526-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.888643] env[69994]: DEBUG nova.compute.manager [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] No waiting events found dispatching network-vif-plugged-9f4cb414-15f4-4fb9-9ad2-6622cded83db {{(pid=69994) pop_instance_event
/opt/stack/nova/nova/compute/manager.py:322}} [ 832.888825] env[69994]: WARNING nova.compute.manager [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Received unexpected event network-vif-plugged-9f4cb414-15f4-4fb9-9ad2-6622cded83db for instance with vm_state building and task_state spawning. [ 832.889078] env[69994]: DEBUG nova.compute.manager [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Received event network-changed-9f4cb414-15f4-4fb9-9ad2-6622cded83db {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.889249] env[69994]: DEBUG nova.compute.manager [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Refreshing instance network info cache due to event network-changed-9f4cb414-15f4-4fb9-9ad2-6622cded83db. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 832.889513] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] Acquiring lock "refresh_cache-bb062ddc-5281-4957-bb9d-8f5c0b0ba526" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.981127] env[69994]: DEBUG oslo_vmware.api [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925502, 'name': PowerOnVM_Task, 'duration_secs': 0.495703} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.981462] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 832.983114] env[69994]: INFO nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Took 7.50 seconds to spawn the instance on the hypervisor. 
[ 832.983114] env[69994]: DEBUG nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 832.983114] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93abfc20-4976-45aa-97e5-4cf1aa7a6c00 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.236526] env[69994]: DEBUG nova.scheduler.client.report [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 833.244436] env[69994]: DEBUG nova.compute.manager [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 833.274962] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52eadc3a-36d3-90ea-6e90-7404a622638b, 'name': SearchDatastore_Task, 'duration_secs': 0.009956} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.275299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.275564] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 298a4d59-733f-4cda-a9c2-80dc21be91ca/298a4d59-733f-4cda-a9c2-80dc21be91ca.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 833.275830] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-972607b5-e9f9-4f5a-a14c-d547efc97c8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.288491] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 833.288491] env[69994]: value = "task-2925503" [ 833.288491] env[69994]: _type = "Task" [ 833.288491] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.298753] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925503, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.332861] env[69994]: DEBUG nova.network.neutron [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.502079] env[69994]: INFO nova.compute.manager [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Took 34.79 seconds to build instance. 
[ 833.526736] env[69994]: DEBUG nova.network.neutron [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Updating instance_info_cache with network_info: [{"id": "9f4cb414-15f4-4fb9-9ad2-6622cded83db", "address": "fa:16:3e:c0:2f:cd", "network": {"id": "a6b07358-99ca-488f-b73e-8f1cbcdfe80d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-754407706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f632b166593c4f6bb1d6e8b795f9e2e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e029825-6c65-4ac7-88f6-65f9d106db76", "external-id": "nsx-vlan-transportzone-428", "segmentation_id": 428, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f4cb414-15", "ovs_interfaceid": "9f4cb414-15f4-4fb9-9ad2-6622cded83db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.741923] env[69994]: DEBUG oslo_concurrency.lockutils [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.013s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.744353] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.065s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.745865] env[69994]: INFO nova.compute.claims [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.769356] env[69994]: INFO nova.scheduler.client.report [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Deleted allocations for instance 9a1343a8-11b4-4c9e-8445-931eab036a4d [ 833.796867] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925503, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.856644] env[69994]: DEBUG nova.network.neutron [-] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.004040] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae20dc0-dbdc-4a31-84a4-d7bf248e72e1 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "67f5ad56-9455-43fc-b940-8a67974703cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 71.999s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.029416] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Releasing lock "refresh_cache-bb062ddc-5281-4957-bb9d-8f5c0b0ba526" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.030260] env[69994]: DEBUG nova.compute.manager [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Instance network_info: |[{"id": "9f4cb414-15f4-4fb9-9ad2-6622cded83db", "address": "fa:16:3e:c0:2f:cd", "network": {"id": "a6b07358-99ca-488f-b73e-8f1cbcdfe80d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-754407706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f632b166593c4f6bb1d6e8b795f9e2e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e029825-6c65-4ac7-88f6-65f9d106db76", "external-id": "nsx-vlan-transportzone-428", "segmentation_id": 428, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f4cb414-15", "ovs_interfaceid": "9f4cb414-15f4-4fb9-9ad2-6622cded83db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 834.030583] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] Acquired lock "refresh_cache-bb062ddc-5281-4957-bb9d-8f5c0b0ba526" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.030822] env[69994]: DEBUG nova.network.neutron [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Refreshing network info cache for port 9f4cb414-15f4-4fb9-9ad2-6622cded83db {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.034039] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None
req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:2f:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1e029825-6c65-4ac7-88f6-65f9d106db76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f4cb414-15f4-4fb9-9ad2-6622cded83db', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 834.041571] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 834.042273] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 834.043234] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a25863bb-6ffc-4e5c-99b9-fb9d92085696 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.071391] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 834.071391] env[69994]: value = "task-2925504" [ 834.071391] env[69994]: _type = "Task" [ 834.071391] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.080970] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925504, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.256247] env[69994]: DEBUG nova.compute.manager [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 834.281175] env[69994]: DEBUG oslo_concurrency.lockutils [None req-461481c3-28a4-4b07-803e-815da5eb73b6 tempest-ServerAddressesNegativeTestJSON-1313240219 tempest-ServerAddressesNegativeTestJSON-1313240219-project-member] Lock "9a1343a8-11b4-4c9e-8445-931eab036a4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 32.209s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.288390] env[69994]: DEBUG nova.virt.hardware [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:30:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='428827283',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-28844606',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=<?>,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-11T12:28:31Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 834.288732] env[69994]: DEBUG nova.virt.hardware [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.288797] env[69994]: DEBUG nova.virt.hardware [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 834.288993] env[69994]: DEBUG nova.virt.hardware [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.289602] env[69994]: DEBUG nova.virt.hardware [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 834.289875] env[69994]: DEBUG nova.virt.hardware [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 834.290179] env[69994]: DEBUG nova.virt.hardware [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574
tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 834.290426] env[69994]: DEBUG nova.virt.hardware [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 834.290663] env[69994]: DEBUG nova.virt.hardware [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 834.290901] env[69994]: DEBUG nova.virt.hardware [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 834.291153] env[69994]: DEBUG nova.virt.hardware [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 834.292512] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c66324-df33-4c3d-9f87-c89e77fac64c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.310566] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d3de7e-f3f5-400f-8342-0d7b3f66eaee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.315402] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925503, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.858086} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.315996] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 298a4d59-733f-4cda-a9c2-80dc21be91ca/298a4d59-733f-4cda-a9c2-80dc21be91ca.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 834.316322] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 834.317021] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65110cb8-f566-4033-8d08-dc46e3ef4225 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.331086] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 834.331086] env[69994]: value = "task-2925505" [ 834.331086] env[69994]: _type = "Task" [ 834.331086] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.339252] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925505, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.358394] env[69994]: INFO nova.compute.manager [-] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Took 1.61 seconds to deallocate network for instance. [ 834.581960] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925504, 'name': CreateVM_Task, 'duration_secs': 0.368879} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.583026] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 834.583435] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.583659] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.584810] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 834.585140] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f1b1973-93ad-4413-b7b2-51e6a9982817 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.592264] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 834.592264] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529d95b0-7c70-120d-85d2-ab49c79d80e4" [ 834.592264] env[69994]: _type = "Task" [ 834.592264] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.601452] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529d95b0-7c70-120d-85d2-ab49c79d80e4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.645249] env[69994]: DEBUG nova.network.neutron [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Successfully updated port: a9985d6b-dfb2-4569-99f9-a42c283e7cd1 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 834.783171] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "203bc0d6-c149-4c3d-9ac7-962210d6b01d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.783530] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "203bc0d6-c149-4c3d-9ac7-962210d6b01d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.784094] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "203bc0d6-c149-4c3d-9ac7-962210d6b01d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.784094] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "203bc0d6-c149-4c3d-9ac7-962210d6b01d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.784246] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "203bc0d6-c149-4c3d-9ac7-962210d6b01d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.791562] env[69994]: INFO nova.compute.manager [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Terminating instance [ 834.826520] env[69994]: DEBUG nova.network.neutron [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Updated VIF entry in instance network info cache for port 9f4cb414-15f4-4fb9-9ad2-6622cded83db.
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 834.826881] env[69994]: DEBUG nova.network.neutron [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Updating instance_info_cache with network_info: [{"id": "9f4cb414-15f4-4fb9-9ad2-6622cded83db", "address": "fa:16:3e:c0:2f:cd", "network": {"id": "a6b07358-99ca-488f-b73e-8f1cbcdfe80d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-754407706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f632b166593c4f6bb1d6e8b795f9e2e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e029825-6c65-4ac7-88f6-65f9d106db76", "external-id": "nsx-vlan-transportzone-428", "segmentation_id": 428, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f4cb414-15", "ovs_interfaceid": "9f4cb414-15f4-4fb9-9ad2-6622cded83db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.844017] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925505, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067979} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.847118] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 834.847118] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf9e8f7-8dcd-4bc0-9398-a133203e8310 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.869532] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 298a4d59-733f-4cda-a9c2-80dc21be91ca/298a4d59-733f-4cda-a9c2-80dc21be91ca.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 834.872555] env[69994]: DEBUG oslo_concurrency.lockutils [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.872785] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a157412-7deb-4afa-a4f5-f882d7097a96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.893805] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 834.893805] env[69994]: value = "task-2925506" [ 834.893805] env[69994]: _type = "Task" [ 834.893805] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.904007] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925506, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.923855] env[69994]: DEBUG nova.compute.manager [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Received event network-vif-deleted-d5b5dcfa-33de-47f7-8356-2384f6ed2083 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 834.924061] env[69994]: DEBUG nova.compute.manager [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Received event network-vif-plugged-a9985d6b-dfb2-4569-99f9-a42c283e7cd1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 834.924266] env[69994]: DEBUG oslo_concurrency.lockutils [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] Acquiring lock "234c2683-80f3-4f29-bcc9-9853338128bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.924470] env[69994]: DEBUG oslo_concurrency.lockutils [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] Lock "234c2683-80f3-4f29-bcc9-9853338128bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.924635] env[69994]: DEBUG oslo_concurrency.lockutils [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] Lock "234c2683-80f3-4f29-bcc9-9853338128bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.924797] env[69994]: DEBUG nova.compute.manager [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] No waiting events found dispatching network-vif-plugged-a9985d6b-dfb2-4569-99f9-a42c283e7cd1 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 834.924957] env[69994]: WARNING nova.compute.manager [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Received unexpected event network-vif-plugged-a9985d6b-dfb2-4569-99f9-a42c283e7cd1 for instance with vm_state building and task_state spawning. [ 834.925128] env[69994]: DEBUG nova.compute.manager [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Received event network-changed-a9985d6b-dfb2-4569-99f9-a42c283e7cd1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 834.925280] env[69994]: DEBUG nova.compute.manager [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Refreshing instance network info cache due to event network-changed-a9985d6b-dfb2-4569-99f9-a42c283e7cd1.
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 834.925455] env[69994]: DEBUG oslo_concurrency.lockutils [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] Acquiring lock "refresh_cache-234c2683-80f3-4f29-bcc9-9853338128bd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.925697] env[69994]: DEBUG oslo_concurrency.lockutils [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] Acquired lock "refresh_cache-234c2683-80f3-4f29-bcc9-9853338128bd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.925873] env[69994]: DEBUG nova.network.neutron [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Refreshing network info cache for port a9985d6b-dfb2-4569-99f9-a42c283e7cd1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.999231] env[69994]: DEBUG oslo_concurrency.lockutils [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "67f5ad56-9455-43fc-b940-8a67974703cc" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.999231] env[69994]: DEBUG oslo_concurrency.lockutils [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "67f5ad56-9455-43fc-b940-8a67974703cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.999231] env[69994]: DEBUG oslo_concurrency.lockutils [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "67f5ad56-9455-43fc-b940-8a67974703cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.999231] env[69994]: DEBUG oslo_concurrency.lockutils [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "67f5ad56-9455-43fc-b940-8a67974703cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.999231] env[69994]: DEBUG oslo_concurrency.lockutils [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "67f5ad56-9455-43fc-b940-8a67974703cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.005050] env[69994]: INFO nova.compute.manager [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658
tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Terminating instance [ 835.102307] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529d95b0-7c70-120d-85d2-ab49c79d80e4, 'name': SearchDatastore_Task, 'duration_secs': 0.063728} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.102671] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.102948] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 835.103235] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.103421] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 835.103641] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 835.103962] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61f644e5-ff0e-4d2e-a390-d447318a6d69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.119789] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 835.120150] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 
tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 835.122456] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff7f8557-dcda-423e-80d6-75810e3c0dd8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.128926] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 835.128926] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52962084-0f0b-b104-9d8e-debd8d6dffac" [ 835.128926] env[69994]: _type = "Task" [ 835.128926] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.137139] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52962084-0f0b-b104-9d8e-debd8d6dffac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.147687] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "refresh_cache-234c2683-80f3-4f29-bcc9-9853338128bd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.186805] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f3b8b2-8858-4466-ae5c-c26602b17119 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.196747] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75afd51-19cf-4e29-a91d-790fbf542352 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.227761] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f1b76d-0873-4e1a-be48-584b3a92c6ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.237649] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4875ed-fa3c-4626-b735-b3a1730c037c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.249079] env[69994]: DEBUG nova.compute.provider_tree [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.299351] env[69994]: DEBUG nova.compute.manager [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 
203bc0d6-c149-4c3d-9ac7-962210d6b01d] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 835.299623] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 835.300445] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544943fb-39de-4eed-a66d-553f843be352 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.309746] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 835.309746] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab5a391c-e396-45bd-961e-5389299ad2da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.314498] env[69994]: DEBUG oslo_vmware.api [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 835.314498] env[69994]: value = "task-2925507" [ 835.314498] env[69994]: _type = "Task" [ 835.314498] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.322651] env[69994]: DEBUG oslo_vmware.api [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925507, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.329855] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffe6ffbb-3997-4f7a-83bf-d6c87d3356e7 req-c0646d05-5497-43ef-86c5-37c21033379c service nova] Releasing lock "refresh_cache-bb062ddc-5281-4957-bb9d-8f5c0b0ba526" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.404320] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.482232] env[69994]: DEBUG nova.network.neutron [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.513385] env[69994]: DEBUG nova.compute.manager [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 835.513385] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 835.513625] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e48fa56-6b0d-4e2b-8818-5c8abab56f74 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.527399] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 835.527723] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78aa8a32-7d51-401e-aced-ecb1b86ec2c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.533641] env[69994]: DEBUG oslo_vmware.api [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 835.533641] env[69994]: value = "task-2925508" [ 835.533641] env[69994]: _type = "Task" [ 835.533641] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.545034] env[69994]: DEBUG oslo_vmware.api [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925508, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.616480] env[69994]: DEBUG nova.network.neutron [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.643287] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52962084-0f0b-b104-9d8e-debd8d6dffac, 'name': SearchDatastore_Task, 'duration_secs': 0.035286} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.643815] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bf26682-1393-43f5-aefb-a54087b378de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.650796] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 835.650796] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d8517a-a8c0-4876-f13d-646e92b59d10" [ 835.650796] env[69994]: _type = "Task" [ 835.650796] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.660226] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d8517a-a8c0-4876-f13d-646e92b59d10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.752057] env[69994]: DEBUG nova.scheduler.client.report [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 835.824314] env[69994]: DEBUG oslo_vmware.api [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925507, 'name': PowerOffVM_Task, 'duration_secs': 0.204398} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.824598] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.824767] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 835.825033] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5dd6160f-93b1-4945-9906-93ef43e1575c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.882057] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 835.882057] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 835.882057] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Deleting the datastore file [datastore1] 203bc0d6-c149-4c3d-9ac7-962210d6b01d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.882057] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad8d5d0c-7103-4a81-afed-7d6b6d23f57f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.887810] env[69994]: DEBUG oslo_vmware.api [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 835.887810] env[69994]: value = "task-2925510" [ 835.887810] env[69994]: _type = "Task" [ 835.887810] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.895995] env[69994]: DEBUG oslo_vmware.api [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925510, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.903377] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925506, 'name': ReconfigVM_Task, 'duration_secs': 0.71617} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.903694] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 298a4d59-733f-4cda-a9c2-80dc21be91ca/298a4d59-733f-4cda-a9c2-80dc21be91ca.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 835.904339] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce308ab8-8b68-429c-889d-55197723bcf9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.909432] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 835.909432] env[69994]: value = "task-2925511" [ 835.909432] env[69994]: _type = "Task" [ 835.909432] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.920022] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925511, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.046754] env[69994]: DEBUG oslo_vmware.api [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925508, 'name': PowerOffVM_Task, 'duration_secs': 0.177089} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.047994] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 836.047994] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 836.047994] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53a18c5c-cf32-4bae-8d06-758d34e9cc5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.109098] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 836.109349] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 836.109529] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Deleting the datastore file [datastore1] 67f5ad56-9455-43fc-b940-8a67974703cc {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 836.109792] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66b9e530-71a8-40bb-8c0e-544783eae119 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.115809] env[69994]: DEBUG oslo_vmware.api [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for the task: (returnval){ [ 836.115809] env[69994]: value = "task-2925513" [ 836.115809] env[69994]: _type = "Task" [ 836.115809] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.120862] env[69994]: DEBUG oslo_concurrency.lockutils [req-4470d02e-6c09-4c81-bfec-d5fa1eba6297 req-c5fb660d-1f78-415f-88e1-9a60c1c32141 service nova] Releasing lock "refresh_cache-234c2683-80f3-4f29-bcc9-9853338128bd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.123962] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquired lock "refresh_cache-234c2683-80f3-4f29-bcc9-9853338128bd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.124137] env[69994]: DEBUG nova.network.neutron [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.125317] env[69994]: DEBUG oslo_vmware.api [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925513, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.161119] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d8517a-a8c0-4876-f13d-646e92b59d10, 'name': SearchDatastore_Task, 'duration_secs': 0.012695} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.161406] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.161862] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] bb062ddc-5281-4957-bb9d-8f5c0b0ba526/bb062ddc-5281-4957-bb9d-8f5c0b0ba526.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 836.161988] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da44e72e-dc86-4450-97b4-cbc8ee4c539b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.168135] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 836.168135] env[69994]: value = "task-2925514" [ 836.168135] env[69994]: _type = "Task" [ 836.168135] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.177645] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925514, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.265390] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.521s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.266142] env[69994]: DEBUG nova.compute.manager [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 836.269442] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.829s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.269730] env[69994]: DEBUG nova.objects.instance [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'resources' on Instance uuid e46b8a11-650a-4e34-bc4a-e1c1b2515e76 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 836.401743] env[69994]: DEBUG oslo_vmware.api [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925510, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141154} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.403272] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.403862] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 836.404428] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 836.404844] env[69994]: INFO nova.compute.manager [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Took 1.11 seconds to destroy the instance on the hypervisor. [ 836.405394] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 836.405895] env[69994]: DEBUG nova.compute.manager [-] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 836.406211] env[69994]: DEBUG nova.network.neutron [-] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 836.419025] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925511, 'name': Rename_Task, 'duration_secs': 0.133983} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.420147] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 836.420147] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b435861e-9ead-48be-804a-7c7fb9003f2c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.426946] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 836.426946] env[69994]: value = "task-2925515" [ 836.426946] env[69994]: _type = "Task" [ 836.426946] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.439490] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925515, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.628799] env[69994]: DEBUG oslo_vmware.api [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Task: {'id': task-2925513, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143209} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.629362] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.629362] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 836.629643] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 836.629837] env[69994]: INFO nova.compute.manager [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Took 1.12 seconds to destroy the instance on the hypervisor. [ 836.630278] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 836.630826] env[69994]: DEBUG nova.compute.manager [-] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 836.631325] env[69994]: DEBUG nova.network.neutron [-] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 836.678112] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925514, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468521} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.678410] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] bb062ddc-5281-4957-bb9d-8f5c0b0ba526/bb062ddc-5281-4957-bb9d-8f5c0b0ba526.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 836.678642] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.678902] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf06435a-1cc2-454e-bce0-fc3d1a7b87fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.685984] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 836.685984] env[69994]: value = "task-2925516" [ 836.685984] env[69994]: _type = "Task" [ 836.685984] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.693674] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925516, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.697039] env[69994]: DEBUG nova.network.neutron [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 836.773606] env[69994]: DEBUG nova.compute.utils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 836.779169] env[69994]: DEBUG nova.compute.manager [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 836.779456] env[69994]: DEBUG nova.network.neutron [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 836.787312] env[69994]: DEBUG nova.compute.manager [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 836.851305] env[69994]: DEBUG nova.compute.manager [req-766cb4e7-77a7-4079-8281-1816a6f494b0 req-a5d220e0-f916-40ea-a822-b195b52bddda service nova] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Received event network-vif-deleted-2d80d63c-74f9-46d4-b671-7fb818563da4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 836.851305] env[69994]: INFO nova.compute.manager [req-766cb4e7-77a7-4079-8281-1816a6f494b0 req-a5d220e0-f916-40ea-a822-b195b52bddda service nova] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Neutron deleted interface 2d80d63c-74f9-46d4-b671-7fb818563da4; detaching it from the instance and deleting it from the info cache [ 836.851739] env[69994]: DEBUG nova.network.neutron [req-766cb4e7-77a7-4079-8281-1816a6f494b0 req-a5d220e0-f916-40ea-a822-b195b52bddda service nova] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.856742] env[69994]: DEBUG nova.policy [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '550fe2bfeab14f0fa409c65d98954e7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21bf4c6f3b2c45218949b0e6c1eb84fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 836.941030] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925515, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.974779] env[69994]: DEBUG nova.network.neutron [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Updating instance_info_cache with network_info: [{"id": "a9985d6b-dfb2-4569-99f9-a42c283e7cd1", "address": "fa:16:3e:d6:8b:50", "network": {"id": "596cb0a2-fc6e-400a-89ef-dcae93a7ee7b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1499088312-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd7386da3414f198142cee5c6d383b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f44b2fa3-6730-4b87-8839-947eff21213f", "external-id": "nsx-vlan-transportzone-984", "segmentation_id": 984, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9985d6b-df", "ovs_interfaceid": "a9985d6b-dfb2-4569-99f9-a42c283e7cd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.154278] env[69994]: DEBUG nova.compute.manager [req-b95cce51-7991-4ed9-891d-3ed82a0d97f8 req-0ca745d5-a1d4-4ecb-8a95-18b9358fa119 service nova] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Received event network-vif-deleted-56bddff9-755c-462f-954d-7b8b28651134 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 837.154415] env[69994]: INFO nova.compute.manager [req-b95cce51-7991-4ed9-891d-3ed82a0d97f8 req-0ca745d5-a1d4-4ecb-8a95-18b9358fa119 service nova] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Neutron deleted interface 56bddff9-755c-462f-954d-7b8b28651134; detaching it from the instance and deleting it from the info cache [ 837.154829] env[69994]: DEBUG nova.network.neutron [req-b95cce51-7991-4ed9-891d-3ed82a0d97f8 req-0ca745d5-a1d4-4ecb-8a95-18b9358fa119 service nova] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.197741] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925516, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062738} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.198111] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 837.198885] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358188f9-a9a1-4936-9aca-128634d2855d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.201300] env[69994]: DEBUG nova.network.neutron [-] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.224575] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] bb062ddc-5281-4957-bb9d-8f5c0b0ba526/bb062ddc-5281-4957-bb9d-8f5c0b0ba526.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.228194] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3baa91c3-b174-4fe9-b9e9-a67769e93c16 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.249592] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 837.249592] env[69994]: value = "task-2925517" [ 837.249592] env[69994]: _type = "Task" [ 837.249592] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.259429] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925517, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.347635] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7db2b0-66f6-4de6-8661-49b90c01a80c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.355318] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9760ec7d-3d9c-4e81-adb9-78c84a0cb321 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.360306] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-669cec5f-8a21-4673-9bad-88296eabba84 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.392146] env[69994]: DEBUG nova.network.neutron [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Successfully created port: 76010ade-afe2-44ed-bf2f-ed07bdaac451 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.395539] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76cf4616-f711-48e9-bc95-93c95037f351 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.402369] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72427272-79df-4682-bca2-aefb8678b834 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.418936] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c943588-3863-405b-8c43-fdfcbdc4886b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.433416] env[69994]: DEBUG nova.compute.provider_tree [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.454755] env[69994]: DEBUG nova.compute.manager [req-766cb4e7-77a7-4079-8281-1816a6f494b0 req-a5d220e0-f916-40ea-a822-b195b52bddda service nova] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Detach interface failed, port_id=2d80d63c-74f9-46d4-b671-7fb818563da4, reason: Instance 203bc0d6-c149-4c3d-9ac7-962210d6b01d could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 837.460670] env[69994]: DEBUG oslo_vmware.api [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925515, 'name': PowerOnVM_Task, 'duration_secs': 0.532552} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.460984] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 837.461248] env[69994]: DEBUG nova.compute.manager [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 837.462036] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a64034-1c53-4b86-ab27-f0f3acd5ddf0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.477881] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Releasing lock "refresh_cache-234c2683-80f3-4f29-bcc9-9853338128bd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.478160] env[69994]: DEBUG nova.compute.manager [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Instance network_info: |[{"id": "a9985d6b-dfb2-4569-99f9-a42c283e7cd1", "address": "fa:16:3e:d6:8b:50", "network": {"id": "596cb0a2-fc6e-400a-89ef-dcae93a7ee7b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1499088312-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd7386da3414f198142cee5c6d383b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f44b2fa3-6730-4b87-8839-947eff21213f", "external-id": "nsx-vlan-transportzone-984", "segmentation_id": 984, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9985d6b-df", "ovs_interfaceid": "a9985d6b-dfb2-4569-99f9-a42c283e7cd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 837.478559] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:8b:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f44b2fa3-6730-4b87-8839-947eff21213f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': 'a9985d6b-dfb2-4569-99f9-a42c283e7cd1', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 837.486237] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 837.486516] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 837.486761] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f843f92-dad1-4cce-bd82-7f5c2009cd53 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.507913] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 837.507913] env[69994]: value = "task-2925518" [ 837.507913] env[69994]: _type = "Task" [ 837.507913] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.517076] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925518, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.574731] env[69994]: DEBUG nova.network.neutron [-] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.657413] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf157721-43a6-4eed-95cf-c540490459e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.666289] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760a07c9-4c59-4710-8bde-6f09c59312b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.699951] env[69994]: DEBUG nova.compute.manager [req-b95cce51-7991-4ed9-891d-3ed82a0d97f8 req-0ca745d5-a1d4-4ecb-8a95-18b9358fa119 service nova] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Detach interface failed, port_id=56bddff9-755c-462f-954d-7b8b28651134, reason: Instance 67f5ad56-9455-43fc-b940-8a67974703cc could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 837.703646] env[69994]: INFO nova.compute.manager [-] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Took 1.30 seconds to deallocate network for instance. [ 837.758946] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925517, 'name': ReconfigVM_Task, 'duration_secs': 0.286642} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.759074] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Reconfigured VM instance instance-00000033 to attach disk [datastore2] bb062ddc-5281-4957-bb9d-8f5c0b0ba526/bb062ddc-5281-4957-bb9d-8f5c0b0ba526.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 837.759727] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-797a892b-7cd3-4cb7-a83e-c5d9228b9506 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.765692] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 837.765692] env[69994]: value = "task-2925519" [ 837.765692] env[69994]: _type = "Task" [ 837.765692] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.773178] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925519, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.799804] env[69994]: DEBUG nova.compute.manager [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 837.826048] env[69994]: DEBUG nova.virt.hardware [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 837.826368] env[69994]: DEBUG nova.virt.hardware [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.826562] env[69994]: DEBUG nova.virt.hardware [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 837.826843] env[69994]: DEBUG nova.virt.hardware [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.827038] env[69994]: DEBUG nova.virt.hardware [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 837.827201] env[69994]: DEBUG nova.virt.hardware [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 837.827514] env[69994]: DEBUG nova.virt.hardware [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 837.827764] env[69994]: DEBUG nova.virt.hardware [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 837.828026] env[69994]: DEBUG nova.virt.hardware [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Got 1 possible 
topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 837.828211] env[69994]: DEBUG nova.virt.hardware [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 837.828393] env[69994]: DEBUG nova.virt.hardware [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 837.829255] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fd99fe-fab4-41fa-aab8-010203b0043d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.837131] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2907aafa-a4c3-437e-a487-0f3dd18a0878 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.951847] env[69994]: DEBUG nova.scheduler.client.report [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 837.982802] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.019037] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925518, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.077548] env[69994]: INFO nova.compute.manager [-] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Took 1.45 seconds to deallocate network for instance. [ 838.211324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.276037] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925519, 'name': Rename_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.458236] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.189s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.461702] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 29.602s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.495098] env[69994]: INFO nova.scheduler.client.report [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Deleted allocations for instance e46b8a11-650a-4e34-bc4a-e1c1b2515e76 [ 838.531303] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925518, 'name': CreateVM_Task, 'duration_secs': 0.706033} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.531303] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 838.531644] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.531823] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.532210] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 838.532472] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2beea51-2bc3-4186-86c0-412c2d76c3e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.537333] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 838.537333] env[69994]: value = 
"session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c751e0-e587-b5fb-99bd-64ce8a64208f" [ 838.537333] env[69994]: _type = "Task" [ 838.537333] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.546714] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c751e0-e587-b5fb-99bd-64ce8a64208f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.584430] env[69994]: DEBUG oslo_concurrency.lockutils [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.784235] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925519, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.975019] env[69994]: INFO nova.compute.claims [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 839.005727] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e5c0c6c-4c90-453b-9afe-a9aebb9151a8 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.480s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.006058] env[69994]: DEBUG oslo_concurrency.lockutils [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] Acquired lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.007893] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fad59bc-9616-4dca-8032-e4a35caa79d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.017166] env[69994]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 839.018416] env[69994]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=69994) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 839.020268] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c76de63-50e4-4f6f-bd4c-072d37f57d9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.030328] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686ee418-0576-4712-be3d-50f970eff199 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.051651] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c751e0-e587-b5fb-99bd-64ce8a64208f, 'name': SearchDatastore_Task, 'duration_secs': 0.00999} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.051936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.052412] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.052726] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.052896] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.053181] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.053452] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b50219e-22cc-4184-918b-6c3dc5642710 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.072298] env[69994]: ERROR root [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-587443' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 479, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-587443' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-587443' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-587443'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', 
"oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-587443' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-587443' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-587443'}\n"]: nova.exception.InstanceNotFound: Instance e46b8a11-650a-4e34-bc4a-e1c1b2515e76 could not be found. [ 839.072474] env[69994]: DEBUG oslo_concurrency.lockutils [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] Releasing lock "e46b8a11-650a-4e34-bc4a-e1c1b2515e76" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.073191] env[69994]: DEBUG nova.compute.manager [req-e16aa7c5-5fcb-49ce-8aea-354f5acb3870 req-1b27f54a-d6cd-4bff-8e98-78e90d350a51 service nova] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Detach interface failed, port_id=016f8957-6a5e-4487-b3e5-cb437366c800, reason: Instance e46b8a11-650a-4e34-bc4a-e1c1b2515e76 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 839.083232] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 839.083232] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 839.086107] env[69994]: DEBUG nova.compute.manager [req-34010b84-0e15-48af-8c34-39727b020a23 req-b3c4cec6-f1e2-4580-a1d2-8fe274af5cfe service nova] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Received event network-vif-plugged-76010ade-afe2-44ed-bf2f-ed07bdaac451 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 839.086107] env[69994]: DEBUG oslo_concurrency.lockutils [req-34010b84-0e15-48af-8c34-39727b020a23 req-b3c4cec6-f1e2-4580-a1d2-8fe274af5cfe service nova] Acquiring lock "b4c6b628-426e-4efc-b8b6-0c2937ef6df3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.086107] env[69994]: DEBUG oslo_concurrency.lockutils [req-34010b84-0e15-48af-8c34-39727b020a23 req-b3c4cec6-f1e2-4580-a1d2-8fe274af5cfe service nova] Lock "b4c6b628-426e-4efc-b8b6-0c2937ef6df3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.086107] env[69994]: DEBUG oslo_concurrency.lockutils [req-34010b84-0e15-48af-8c34-39727b020a23 req-b3c4cec6-f1e2-4580-a1d2-8fe274af5cfe service nova] Lock "b4c6b628-426e-4efc-b8b6-0c2937ef6df3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.086107] env[69994]: DEBUG nova.compute.manager [req-34010b84-0e15-48af-8c34-39727b020a23 req-b3c4cec6-f1e2-4580-a1d2-8fe274af5cfe service nova] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] No waiting events found dispatching network-vif-plugged-76010ade-afe2-44ed-bf2f-ed07bdaac451 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 839.086107] env[69994]: WARNING nova.compute.manager [req-34010b84-0e15-48af-8c34-39727b020a23 req-b3c4cec6-f1e2-4580-a1d2-8fe274af5cfe service nova] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Received unexpected event network-vif-plugged-76010ade-afe2-44ed-bf2f-ed07bdaac451 for instance with vm_state building and task_state spawning. [ 839.088633] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e28302f-a1e2-426b-bf7f-02e9907bccca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.091969] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 839.091969] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526511c5-b08f-e1c7-b2d6-f056b0c8966a" [ 839.091969] env[69994]: _type = "Task" [ 839.091969] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.100291] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526511c5-b08f-e1c7-b2d6-f056b0c8966a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.216776] env[69994]: DEBUG nova.network.neutron [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Successfully updated port: 76010ade-afe2-44ed-bf2f-ed07bdaac451 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.278027] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925519, 'name': Rename_Task, 'duration_secs': 1.477044} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.278334] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 839.278585] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2714e012-ad90-4e8a-995d-0bb1b3b06d8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.286586] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 839.286586] env[69994]: value = "task-2925520" [ 839.286586] env[69994]: _type = "Task" [ 839.286586] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.299701] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925520, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.356257] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "956306bc-4701-4c04-8221-8ec0b9df73ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.356257] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "956306bc-4701-4c04-8221-8ec0b9df73ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.356257] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "956306bc-4701-4c04-8221-8ec0b9df73ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.356257] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "956306bc-4701-4c04-8221-8ec0b9df73ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.356257] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "956306bc-4701-4c04-8221-8ec0b9df73ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.357362] env[69994]: INFO nova.compute.manager [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Terminating instance [ 839.481328] env[69994]: INFO nova.compute.resource_tracker [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating resource usage from migration 18246889-8825-4ae1-9687-068788eac036 [ 839.603882] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526511c5-b08f-e1c7-b2d6-f056b0c8966a, 'name': SearchDatastore_Task, 'duration_secs': 0.014436} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.605232] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55d57428-d2cf-4972-8e51-bd8ec6f1b3b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.613031] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 839.613031] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522e50ab-fbda-2c97-9295-0a4b69e15cb4" [ 839.613031] env[69994]: _type = "Task" [ 839.613031] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.625941] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522e50ab-fbda-2c97-9295-0a4b69e15cb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.720015] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "refresh_cache-b4c6b628-426e-4efc-b8b6-0c2937ef6df3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.720015] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "refresh_cache-b4c6b628-426e-4efc-b8b6-0c2937ef6df3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.720015] env[69994]: DEBUG nova.network.neutron [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 839.805673] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925520, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.863772] env[69994]: DEBUG nova.compute.manager [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 839.865163] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 839.865163] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cb38fc-663d-4152-a45c-b854d4c992d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.873187] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 839.873904] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7f4cdea-c55b-4185-af51-336d071d9684 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.879190] env[69994]: DEBUG oslo_vmware.api [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 839.879190] env[69994]: value = "task-2925521" [ 839.879190] env[69994]: _type = "Task" [ 839.879190] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.888972] env[69994]: DEBUG oslo_vmware.api [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925521, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.021408] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbedb9c6-17f9-4ae1-a285-106bdf39a790 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.029872] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa32579-8ff9-4be8-9a77-bca3aae273e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.061516] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d49ddd9-ab3f-49ea-908c-a396532fa47b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.069598] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7c1625-5600-4f76-88f6-b0e21d0495ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.085501] env[69994]: DEBUG nova.compute.provider_tree [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.122746] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522e50ab-fbda-2c97-9295-0a4b69e15cb4, 'name': SearchDatastore_Task, 'duration_secs': 0.012442} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.123876] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.124737] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 234c2683-80f3-4f29-bcc9-9853338128bd/234c2683-80f3-4f29-bcc9-9853338128bd.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 840.125418] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2ef6bd2-5868-4c03-a793-77cde118f396 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.132186] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 840.132186] env[69994]: value = "task-2925522" [ 840.132186] env[69994]: _type = "Task" [ 840.132186] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.141827] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925522, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.255679] env[69994]: DEBUG nova.network.neutron [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 840.299120] env[69994]: DEBUG oslo_vmware.api [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925520, 'name': PowerOnVM_Task, 'duration_secs': 0.522586} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.299511] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 840.299616] env[69994]: INFO nova.compute.manager [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Took 9.14 seconds to spawn the instance on the hypervisor. [ 840.299804] env[69994]: DEBUG nova.compute.manager [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 840.300621] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc74de7-99c6-4654-8f10-33321e3a2531 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.389787] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "06fa5ab5-baab-466e-8574-5391247c13a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.390370] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "06fa5ab5-baab-466e-8574-5391247c13a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.399278] env[69994]: DEBUG oslo_vmware.api [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925521, 'name': PowerOffVM_Task, 'duration_secs': 0.334571} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.400906] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 840.400906] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 840.400906] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b188504-b46b-4d6b-add2-76bccf6c7457 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.474397] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 840.474397] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 840.474572] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleting the datastore file [datastore2] 956306bc-4701-4c04-8221-8ec0b9df73ca {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 840.474840] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5ed054d-b084-4a22-80c3-0d661e469ef7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.482385] env[69994]: DEBUG oslo_vmware.api [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 840.482385] env[69994]: value = "task-2925524" [ 840.482385] env[69994]: _type = "Task" [ 840.482385] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.492049] env[69994]: DEBUG oslo_vmware.api [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925524, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.493134] env[69994]: DEBUG nova.network.neutron [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Updating instance_info_cache with network_info: [{"id": "76010ade-afe2-44ed-bf2f-ed07bdaac451", "address": "fa:16:3e:51:2f:1c", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76010ade-af", "ovs_interfaceid": "76010ade-afe2-44ed-bf2f-ed07bdaac451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.590203] env[69994]: DEBUG nova.scheduler.client.report [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 840.642310] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925522, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.821177] env[69994]: INFO nova.compute.manager [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Took 38.96 seconds to build instance. [ 840.893830] env[69994]: DEBUG nova.compute.manager [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 840.999503] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "refresh_cache-b4c6b628-426e-4efc-b8b6-0c2937ef6df3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.999812] env[69994]: DEBUG nova.compute.manager [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Instance network_info: |[{"id": "76010ade-afe2-44ed-bf2f-ed07bdaac451", "address": "fa:16:3e:51:2f:1c", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76010ade-af", "ovs_interfaceid": "76010ade-afe2-44ed-bf2f-ed07bdaac451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 841.000151] env[69994]: DEBUG oslo_vmware.api [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925524, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.365098} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.000824] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:2f:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04e15990-16e1-4cb2-b0f0-06c362e68c5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76010ade-afe2-44ed-bf2f-ed07bdaac451', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 841.011088] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 841.011088] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 841.012093] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 841.012093] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 841.012093] env[69994]: INFO nova.compute.manager [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Took 1.15 seconds to destroy the instance on the hypervisor. [ 841.012093] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 841.012093] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 841.012296] env[69994]: DEBUG nova.compute.manager [-] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 841.012332] env[69994]: DEBUG nova.network.neutron [-] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 841.015015] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4f7a8fd-f818-482a-b533-ef25aea4e96b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.039372] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 841.039372] env[69994]: value = "task-2925525" [ 841.039372] env[69994]: _type = "Task" [ 841.039372] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.048809] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925525, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.096552] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.635s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.096795] env[69994]: INFO nova.compute.manager [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Migrating [ 841.110308] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.314s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.112474] env[69994]: INFO nova.compute.claims [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.146711] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925522, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522856} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.147027] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 234c2683-80f3-4f29-bcc9-9853338128bd/234c2683-80f3-4f29-bcc9-9853338128bd.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 841.147262] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 841.147831] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68ffb166-407f-468f-86ce-c1057322d8bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.155055] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 841.155055] env[69994]: value = "task-2925526" [ 841.155055] env[69994]: _type = "Task" [ 841.155055] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.164970] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925526, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.300903] env[69994]: DEBUG nova.compute.manager [req-5dbdc743-1b35-47c2-bbcd-7845c686f28a req-872c1109-5853-423f-9050-33bac7008d20 service nova] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Received event network-changed-76010ade-afe2-44ed-bf2f-ed07bdaac451 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 841.301131] env[69994]: DEBUG nova.compute.manager [req-5dbdc743-1b35-47c2-bbcd-7845c686f28a req-872c1109-5853-423f-9050-33bac7008d20 service nova] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Refreshing instance network info cache due to event network-changed-76010ade-afe2-44ed-bf2f-ed07bdaac451. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 841.301348] env[69994]: DEBUG oslo_concurrency.lockutils [req-5dbdc743-1b35-47c2-bbcd-7845c686f28a req-872c1109-5853-423f-9050-33bac7008d20 service nova] Acquiring lock "refresh_cache-b4c6b628-426e-4efc-b8b6-0c2937ef6df3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.301512] env[69994]: DEBUG oslo_concurrency.lockutils [req-5dbdc743-1b35-47c2-bbcd-7845c686f28a req-872c1109-5853-423f-9050-33bac7008d20 service nova] Acquired lock "refresh_cache-b4c6b628-426e-4efc-b8b6-0c2937ef6df3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.301645] env[69994]: DEBUG nova.network.neutron [req-5dbdc743-1b35-47c2-bbcd-7845c686f28a req-872c1109-5853-423f-9050-33bac7008d20 service nova] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Refreshing network info cache for port 76010ade-afe2-44ed-bf2f-ed07bdaac451 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 841.324043] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fb8db34-3944-4e6a-9ba8-9ba866724d8d tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "bb062ddc-5281-4957-bb9d-8f5c0b0ba526" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.162s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.419798] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.549390] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925525, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.620752] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.620978] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.621234] env[69994]: DEBUG nova.network.neutron [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.664844] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925526, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068961} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.665134] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 841.665902] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88699a5-6d10-47e4-baa7-7c444f6779df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.689047] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 234c2683-80f3-4f29-bcc9-9853338128bd/234c2683-80f3-4f29-bcc9-9853338128bd.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 841.689453] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2b97468-f5d6-4e6d-94fe-c85723a49441 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.714051] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 841.714051] env[69994]: value = "task-2925527" [ 841.714051] env[69994]: _type = "Task" [ 841.714051] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.722765] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925527, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.789737] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 841.789737] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 841.947206] env[69994]: DEBUG nova.network.neutron [-] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.051465] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925525, 'name': CreateVM_Task, 'duration_secs': 0.641631} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.051784] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 842.052489] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.052716] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.053099] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 842.053384] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83d293de-9546-4140-8c5a-04fd8f455ddc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.060891] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 842.060891] env[69994]: value = 
"session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c54bbe-cb42-31b2-f698-bcc86f4932cc" [ 842.060891] env[69994]: _type = "Task" [ 842.060891] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.070183] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c54bbe-cb42-31b2-f698-bcc86f4932cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.095388] env[69994]: DEBUG nova.network.neutron [req-5dbdc743-1b35-47c2-bbcd-7845c686f28a req-872c1109-5853-423f-9050-33bac7008d20 service nova] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Updated VIF entry in instance network info cache for port 76010ade-afe2-44ed-bf2f-ed07bdaac451. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 842.095745] env[69994]: DEBUG nova.network.neutron [req-5dbdc743-1b35-47c2-bbcd-7845c686f28a req-872c1109-5853-423f-9050-33bac7008d20 service nova] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Updating instance_info_cache with network_info: [{"id": "76010ade-afe2-44ed-bf2f-ed07bdaac451", "address": "fa:16:3e:51:2f:1c", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76010ade-af", "ovs_interfaceid": "76010ade-afe2-44ed-bf2f-ed07bdaac451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.227405] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925527, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.303580] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.303907] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.304036] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.304198] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.304343] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.304490] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.304624] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 842.304766] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.427679] env[69994]: DEBUG nova.network.neutron [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance_info_cache with network_info: [{"id": "885142d2-3a31-487c-b773-a0b0df2e4e40", "address": "fa:16:3e:76:b6:04", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap885142d2-3a", "ovs_interfaceid": "885142d2-3a31-487c-b773-a0b0df2e4e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.443703] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.444053] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.449466] env[69994]: INFO nova.compute.manager [-] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Took 1.44 seconds to deallocate network for instance. [ 842.579604] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c54bbe-cb42-31b2-f698-bcc86f4932cc, 'name': SearchDatastore_Task, 'duration_secs': 0.010729} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.580395] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.580631] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.581527] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.581527] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.581527] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.581527] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ff38eae-4df9-4082-a04a-e4a15a269d65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.593010] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.593260] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 842.594096] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c25c6079-96d3-4756-b50a-937e0ccc9511 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.597972] env[69994]: DEBUG oslo_concurrency.lockutils [req-5dbdc743-1b35-47c2-bbcd-7845c686f28a req-872c1109-5853-423f-9050-33bac7008d20 service nova] Releasing lock "refresh_cache-b4c6b628-426e-4efc-b8b6-0c2937ef6df3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.601791] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 842.601791] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526c474d-753b-8b1a-8cda-3f1bb1a8c038" [ 842.601791] env[69994]: _type = "Task" [ 842.601791] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.608119] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dfc7b1-205b-41c5-a2bf-1b57222b2b1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.615467] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526c474d-753b-8b1a-8cda-3f1bb1a8c038, 'name': SearchDatastore_Task, 'duration_secs': 0.009847} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.617760] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dc4e6c9-ea67-4add-8a6c-891866840d25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.620577] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1177144e-8c0e-4668-85ed-9ebd1955a339 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.628521] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 842.628521] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cd729c-3106-9fc9-f14e-ba5e3b5f31c9" [ 842.628521] env[69994]: _type = "Task" [ 842.628521] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.656775] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37141895-233f-42f0-836a-f982f2c9f059 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.664606] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cd729c-3106-9fc9-f14e-ba5e3b5f31c9, 'name': SearchDatastore_Task, 'duration_secs': 0.012006} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.666641] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.666907] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] b4c6b628-426e-4efc-b8b6-0c2937ef6df3/b4c6b628-426e-4efc-b8b6-0c2937ef6df3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 842.667209] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3ad9ab0-b0c3-4993-8369-ca9e4fb52970 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.669974] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77dece5a-aeb6-4938-a034-446cdb020191 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.683064] env[69994]: DEBUG nova.compute.provider_tree [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.688096] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 842.688096] env[69994]: value = "task-2925528" [ 842.688096] env[69994]: _type = "Task" [ 842.688096] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.693468] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925528, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.722714] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925527, 'name': ReconfigVM_Task, 'duration_secs': 0.802109} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.722977] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 234c2683-80f3-4f29-bcc9-9853338128bd/234c2683-80f3-4f29-bcc9-9853338128bd.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 842.723317] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=69994) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 842.723976] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-820ab219-86fc-4720-99bb-0b8a62d44346 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.730166] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 842.730166] env[69994]: value = "task-2925529" [ 842.730166] env[69994]: _type = "Task" [ 842.730166] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.738225] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925529, 'name': CreateVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.807219] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.930701] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.948971] env[69994]: DEBUG nova.compute.manager [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 842.956316] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.177403] env[69994]: DEBUG nova.compute.manager [req-94102f33-e126-49b8-bff1-4e01ed51ded4 req-dd2ec048-6846-4c2b-bd73-f4e14831fd62 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Received event network-changed-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 843.178866] env[69994]: DEBUG nova.compute.manager [req-94102f33-e126-49b8-bff1-4e01ed51ded4 req-dd2ec048-6846-4c2b-bd73-f4e14831fd62 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Refreshing instance network info cache due to event network-changed-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 843.178866] env[69994]: DEBUG oslo_concurrency.lockutils [req-94102f33-e126-49b8-bff1-4e01ed51ded4 req-dd2ec048-6846-4c2b-bd73-f4e14831fd62 service nova] Acquiring lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.178866] env[69994]: DEBUG oslo_concurrency.lockutils [req-94102f33-e126-49b8-bff1-4e01ed51ded4 req-dd2ec048-6846-4c2b-bd73-f4e14831fd62 service nova] Acquired lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.178866] env[69994]: DEBUG nova.network.neutron [req-94102f33-e126-49b8-bff1-4e01ed51ded4 req-dd2ec048-6846-4c2b-bd73-f4e14831fd62 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Refreshing network info cache for port 9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 843.191018] env[69994]: DEBUG nova.scheduler.client.report [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 843.201193] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925528, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48175} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.201485] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] b4c6b628-426e-4efc-b8b6-0c2937ef6df3/b4c6b628-426e-4efc-b8b6-0c2937ef6df3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 843.201696] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.201982] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c69563fb-818a-4e84-84ad-0cb994218bef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.208798] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 843.208798] env[69994]: value = "task-2925530" [ 843.208798] env[69994]: _type = "Task" [ 843.208798] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.219055] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925530, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.239396] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925529, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.049582} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.240224] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=69994) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 843.240536] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078764f1-f4b1-418a-a23f-3d98bf370fdf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.266734] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 234c2683-80f3-4f29-bcc9-9853338128bd/ephemeral_0.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.267164] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84477dc6-432d-4104-bc76-78b9ec3536c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.284917] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 843.284917] env[69994]: value = "task-2925531" [ 843.284917] env[69994]: _type = "Task" [ 843.284917] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.292911] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925531, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.474610] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.617253] env[69994]: DEBUG nova.compute.manager [req-5174a85b-a8b7-403b-9803-d81829f2cfb9 req-5df25adf-0ac4-4dca-8414-72740225c330 service nova] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Received event network-vif-deleted-264f000d-41b7-4904-8621-8cd06efa69c8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 843.617550] env[69994]: DEBUG nova.compute.manager [req-5174a85b-a8b7-403b-9803-d81829f2cfb9 req-5df25adf-0ac4-4dca-8414-72740225c330 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Received event network-changed-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 843.617628] env[69994]: DEBUG nova.compute.manager [req-5174a85b-a8b7-403b-9803-d81829f2cfb9 req-5df25adf-0ac4-4dca-8414-72740225c330 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Refreshing instance network info cache due to event network-changed-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 843.617806] env[69994]: DEBUG oslo_concurrency.lockutils [req-5174a85b-a8b7-403b-9803-d81829f2cfb9 req-5df25adf-0ac4-4dca-8414-72740225c330 service nova] Acquiring lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.695834] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.696375] env[69994]: DEBUG nova.compute.manager [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 843.699303] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.581s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.699521] env[69994]: DEBUG nova.objects.instance [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lazy-loading 'resources' on Instance uuid e0764e41-0810-45a1-8917-ac901f0f8321 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 843.720823] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925530, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072164} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.720823] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 843.721128] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422441af-f723-485a-9928-4d748bce842b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.745199] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] b4c6b628-426e-4efc-b8b6-0c2937ef6df3/b4c6b628-426e-4efc-b8b6-0c2937ef6df3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.748607] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8795539c-8a1f-43d5-b63d-2b297511e9dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.771770] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 843.771770] env[69994]: value = "task-2925532" [ 843.771770] env[69994]: _type = "Task" [ 843.771770] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.780964] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925532, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.793988] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925531, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.008495] env[69994]: DEBUG nova.network.neutron [req-94102f33-e126-49b8-bff1-4e01ed51ded4 req-dd2ec048-6846-4c2b-bd73-f4e14831fd62 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Updated VIF entry in instance network info cache for port 9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 844.008832] env[69994]: DEBUG nova.network.neutron [req-94102f33-e126-49b8-bff1-4e01ed51ded4 req-dd2ec048-6846-4c2b-bd73-f4e14831fd62 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Updating instance_info_cache with network_info: [{"id": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "address": "fa:16:3e:12:51:b9", "network": {"id": "a6b07358-99ca-488f-b73e-8f1cbcdfe80d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-754407706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f632b166593c4f6bb1d6e8b795f9e2e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e029825-6c65-4ac7-88f6-65f9d106db76", "external-id": "nsx-vlan-transportzone-428", "segmentation_id": 428, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd8099f-e3", "ovs_interfaceid": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.205019] env[69994]: DEBUG nova.compute.utils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 844.205019] env[69994]: DEBUG nova.compute.manager [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 844.205019] env[69994]: DEBUG nova.network.neutron [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 844.265569] env[69994]: DEBUG nova.policy [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6b3fc4efaa848af87e5477968138d5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0aa49f699b774618b7db9991296e8209', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 844.282427] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925532, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.294375] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925531, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.449422] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474e9832-7662-479d-af13-8f856d63c866 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.471885] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance 'f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 844.516186] env[69994]: DEBUG oslo_concurrency.lockutils [req-94102f33-e126-49b8-bff1-4e01ed51ded4 req-dd2ec048-6846-4c2b-bd73-f4e14831fd62 service nova] Releasing lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.518929] env[69994]: DEBUG oslo_concurrency.lockutils [req-5174a85b-a8b7-403b-9803-d81829f2cfb9 req-5df25adf-0ac4-4dca-8414-72740225c330 service nova] Acquired lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.518929] env[69994]: DEBUG nova.network.neutron [req-5174a85b-a8b7-403b-9803-d81829f2cfb9 req-5df25adf-0ac4-4dca-8414-72740225c330 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Refreshing network info cache for port 9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 844.685128] env[69994]: DEBUG nova.network.neutron [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Successfully created port: 7058cb7d-792e-4141-9145-c7cfde6b5700 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 844.711399] env[69994]: DEBUG nova.compute.manager [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 844.729646] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcda2a2e-91e3-4710-8e8e-e5828b42383d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.739161] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74cfed7-d507-4bae-b0b2-8560fe05f563 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.778745] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59627db8-21a9-4bb2-9b1c-c148a09121c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.787873] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925532, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.794210] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02627e4-24e2-45cf-817a-7db922549214 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.803788] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925531, 'name': ReconfigVM_Task, 'duration_secs': 1.231845} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.812018] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 234c2683-80f3-4f29-bcc9-9853338128bd/ephemeral_0.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.813119] env[69994]: DEBUG nova.compute.provider_tree [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.814519] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f1f5093e-6fa1-4d7e-a830-ac0aedb00440 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.821922] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 844.821922] env[69994]: value = "task-2925533" [ 844.821922] env[69994]: _type = "Task" [ 844.821922] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.831770] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925533, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.982862] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 844.983214] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37c50d70-63a9-4f16-acc8-d475ef70150a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.991093] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 844.991093] env[69994]: value = "task-2925534" [ 844.991093] env[69994]: _type = "Task" [ 844.991093] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.001621] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925534, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.286841] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925532, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.318574] env[69994]: DEBUG nova.scheduler.client.report [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 845.331242] env[69994]: DEBUG nova.network.neutron [req-5174a85b-a8b7-403b-9803-d81829f2cfb9 req-5df25adf-0ac4-4dca-8414-72740225c330 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Updated VIF entry in instance network info cache for port 9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 845.331631] env[69994]: DEBUG nova.network.neutron [req-5174a85b-a8b7-403b-9803-d81829f2cfb9 req-5df25adf-0ac4-4dca-8414-72740225c330 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Updating instance_info_cache with network_info: [{"id": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "address": "fa:16:3e:12:51:b9", "network": {"id": "a6b07358-99ca-488f-b73e-8f1cbcdfe80d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-754407706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f632b166593c4f6bb1d6e8b795f9e2e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e029825-6c65-4ac7-88f6-65f9d106db76", "external-id": "nsx-vlan-transportzone-428", "segmentation_id": 428, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd8099f-e3", "ovs_interfaceid": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.343396] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925533, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.376313] env[69994]: DEBUG nova.compute.manager [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Received event network-changed-9f4cb414-15f4-4fb9-9ad2-6622cded83db {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 845.376562] env[69994]: DEBUG nova.compute.manager [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Refreshing instance network info cache due to event network-changed-9f4cb414-15f4-4fb9-9ad2-6622cded83db. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 845.376798] env[69994]: DEBUG oslo_concurrency.lockutils [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] Acquiring lock "refresh_cache-bb062ddc-5281-4957-bb9d-8f5c0b0ba526" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.376939] env[69994]: DEBUG oslo_concurrency.lockutils [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] Acquired lock "refresh_cache-bb062ddc-5281-4957-bb9d-8f5c0b0ba526" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.377344] env[69994]: DEBUG nova.network.neutron [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Refreshing network info cache for port 9f4cb414-15f4-4fb9-9ad2-6622cded83db {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.380354] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "bb062ddc-5281-4957-bb9d-8f5c0b0ba526" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.380765] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "bb062ddc-5281-4957-bb9d-8f5c0b0ba526" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.380765] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "bb062ddc-5281-4957-bb9d-8f5c0b0ba526-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.380895] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "bb062ddc-5281-4957-bb9d-8f5c0b0ba526-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.381014] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "bb062ddc-5281-4957-bb9d-8f5c0b0ba526-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.382767] env[69994]: INFO nova.compute.manager [None 
req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Terminating instance [ 845.500996] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925534, 'name': PowerOffVM_Task, 'duration_secs': 0.179788} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.501368] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 845.501561] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance 'f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 845.696385] env[69994]: DEBUG nova.compute.manager [req-3b2dbc3b-d5d0-4653-beaa-6f65337a28a4 req-cbf6d716-ee3e-4ee3-8298-2f1306f580a6 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Received event network-changed-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 845.696879] env[69994]: DEBUG nova.compute.manager [req-3b2dbc3b-d5d0-4653-beaa-6f65337a28a4 req-cbf6d716-ee3e-4ee3-8298-2f1306f580a6 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Refreshing instance network info cache due to event network-changed-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 845.697143] env[69994]: DEBUG oslo_concurrency.lockutils [req-3b2dbc3b-d5d0-4653-beaa-6f65337a28a4 req-cbf6d716-ee3e-4ee3-8298-2f1306f580a6 service nova] Acquiring lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.724601] env[69994]: DEBUG nova.compute.manager [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 845.754176] env[69994]: DEBUG nova.virt.hardware [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 845.754176] env[69994]: DEBUG nova.virt.hardware [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.754176] env[69994]: DEBUG nova.virt.hardware [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 845.754481] env[69994]: DEBUG nova.virt.hardware [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.754481] env[69994]: DEBUG nova.virt.hardware [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 845.754571] env[69994]: DEBUG nova.virt.hardware [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 845.755653] env[69994]: DEBUG nova.virt.hardware [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 845.755653] env[69994]: DEBUG nova.virt.hardware [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 845.755653] env[69994]: DEBUG nova.virt.hardware [None 
req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 845.755653] env[69994]: DEBUG nova.virt.hardware [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 845.755653] env[69994]: DEBUG nova.virt.hardware [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 845.756526] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8aa9cf-7df7-4bee-ae6e-aa1f4ff1e063 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.765900] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3686fe-69a6-489a-bacf-aad555c471ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.790155] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925532, 'name': ReconfigVM_Task, 'duration_secs': 1.960686} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.790570] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Reconfigured VM instance instance-00000035 to attach disk [datastore2] b4c6b628-426e-4efc-b8b6-0c2937ef6df3/b4c6b628-426e-4efc-b8b6-0c2937ef6df3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 845.791407] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-273ebc2a-7480-4476-902c-7b6316a1a264 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.798123] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 845.798123] env[69994]: value = "task-2925535" [ 845.798123] env[69994]: _type = "Task" [ 845.798123] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.808369] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925535, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.835273] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.135s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.841165] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925533, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.841692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.468s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.843811] env[69994]: INFO nova.compute.claims [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 845.846727] env[69994]: DEBUG oslo_concurrency.lockutils [req-5174a85b-a8b7-403b-9803-d81829f2cfb9 req-5df25adf-0ac4-4dca-8414-72740225c330 service nova] Releasing lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.847257] env[69994]: DEBUG oslo_concurrency.lockutils [req-3b2dbc3b-d5d0-4653-beaa-6f65337a28a4 req-cbf6d716-ee3e-4ee3-8298-2f1306f580a6 service nova] Acquired lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.847455] env[69994]: DEBUG nova.network.neutron [req-3b2dbc3b-d5d0-4653-beaa-6f65337a28a4 req-cbf6d716-ee3e-4ee3-8298-2f1306f580a6 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Refreshing network info cache for port 9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.867837] env[69994]: INFO nova.scheduler.client.report [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Deleted allocations for instance e0764e41-0810-45a1-8917-ac901f0f8321 [ 845.886390] env[69994]: DEBUG nova.compute.manager [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 845.886644] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 845.888840] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f8300c-3891-4be4-b6b2-c7c563931ab2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.897341] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 845.897599] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd7820fa-3230-45ce-ae7f-e97d4bc92c72 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.904509] env[69994]: DEBUG oslo_vmware.api [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 845.904509] env[69994]: value = "task-2925536" [ 845.904509] env[69994]: _type = "Task" [ 845.904509] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.920196] env[69994]: DEBUG oslo_vmware.api [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925536, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.010734] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 846.010734] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.010734] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 846.010734] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.010734] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 846.010734] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 846.010734] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 846.010734] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 846.010734] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Got 1 possible topologies {{(pid=69994) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 846.010734] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 846.010734] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 846.018964] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb4f0ee1-54b7-4334-be21-73963923e890 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.036047] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 846.036047] env[69994]: value = "task-2925537" [ 846.036047] env[69994]: _type = "Task" [ 846.036047] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.046522] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925537, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.167738] env[69994]: DEBUG nova.network.neutron [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Updated VIF entry in instance network info cache for port 9f4cb414-15f4-4fb9-9ad2-6622cded83db. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 846.168059] env[69994]: DEBUG nova.network.neutron [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Updating instance_info_cache with network_info: [{"id": "9f4cb414-15f4-4fb9-9ad2-6622cded83db", "address": "fa:16:3e:c0:2f:cd", "network": {"id": "a6b07358-99ca-488f-b73e-8f1cbcdfe80d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-754407706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f632b166593c4f6bb1d6e8b795f9e2e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e029825-6c65-4ac7-88f6-65f9d106db76", "external-id": "nsx-vlan-transportzone-428", "segmentation_id": 428, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f4cb414-15", "ovs_interfaceid": "9f4cb414-15f4-4fb9-9ad2-6622cded83db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.310721] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925535, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.333273] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925533, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.378183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cee57838-70b1-4783-adf9-50fcce04313f tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "e0764e41-0810-45a1-8917-ac901f0f8321" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.694s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.416820] env[69994]: DEBUG oslo_vmware.api [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925536, 'name': PowerOffVM_Task, 'duration_secs': 0.198491} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.417184] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 846.417385] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 846.417674] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-45e594b4-fa2a-4523-a84e-2cf332122c54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.496681] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 846.496921] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 846.497128] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Deleting the datastore file [datastore2] bb062ddc-5281-4957-bb9d-8f5c0b0ba526 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 846.497392] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-86737733-5219-4746-b213-711d950f2106 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.508943] env[69994]: DEBUG oslo_vmware.api [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 846.508943] env[69994]: value = "task-2925539" [ 846.508943] env[69994]: _type = "Task" [ 846.508943] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.518735] env[69994]: DEBUG oslo_vmware.api [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925539, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.548462] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925537, 'name': ReconfigVM_Task, 'duration_secs': 0.282458} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.549009] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance 'f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 846.657546] env[69994]: DEBUG nova.network.neutron [req-3b2dbc3b-d5d0-4653-beaa-6f65337a28a4 req-cbf6d716-ee3e-4ee3-8298-2f1306f580a6 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Updated VIF entry in instance network info cache for port 9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 846.657996] env[69994]: DEBUG nova.network.neutron [req-3b2dbc3b-d5d0-4653-beaa-6f65337a28a4 req-cbf6d716-ee3e-4ee3-8298-2f1306f580a6 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Updating instance_info_cache with network_info: [{"id": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "address": "fa:16:3e:12:51:b9", "network": {"id": "a6b07358-99ca-488f-b73e-8f1cbcdfe80d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-754407706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f632b166593c4f6bb1d6e8b795f9e2e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e029825-6c65-4ac7-88f6-65f9d106db76", "external-id": "nsx-vlan-transportzone-428", "segmentation_id": 428, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd8099f-e3", "ovs_interfaceid": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.671790] env[69994]: DEBUG oslo_concurrency.lockutils [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] Releasing lock "refresh_cache-bb062ddc-5281-4957-bb9d-8f5c0b0ba526" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.671936] env[69994]: DEBUG nova.compute.manager [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Received event network-changed-9f4cb414-15f4-4fb9-9ad2-6622cded83db {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 846.672173] env[69994]: DEBUG nova.compute.manager 
[req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Refreshing instance network info cache due to event network-changed-9f4cb414-15f4-4fb9-9ad2-6622cded83db. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 846.672409] env[69994]: DEBUG oslo_concurrency.lockutils [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] Acquiring lock "refresh_cache-bb062ddc-5281-4957-bb9d-8f5c0b0ba526" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.672554] env[69994]: DEBUG oslo_concurrency.lockutils [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] Acquired lock "refresh_cache-bb062ddc-5281-4957-bb9d-8f5c0b0ba526" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.672890] env[69994]: DEBUG nova.network.neutron [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Refreshing network info cache for port 9f4cb414-15f4-4fb9-9ad2-6622cded83db {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 846.726666] env[69994]: DEBUG nova.network.neutron [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Successfully updated port: 7058cb7d-792e-4141-9145-c7cfde6b5700 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 846.811694] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925535, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.834049] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925533, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.025257] env[69994]: DEBUG oslo_vmware.api [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925539, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215678} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.028102] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 847.028352] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 847.028556] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 847.028774] env[69994]: INFO nova.compute.manager [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Took 1.14 seconds to destroy the instance on the hypervisor. [ 847.029028] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 847.030851] env[69994]: DEBUG nova.compute.manager [-] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 847.030851] env[69994]: DEBUG nova.network.neutron [-] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 847.055669] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:32:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='231a442d-6575-43ed-8970-683d59890f06',id=27,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1776927979',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 847.057020] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 847.057020] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 847.057020] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 847.057020] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 847.057020] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 847.057020] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 847.057020] env[69994]: DEBUG 
nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 847.057332] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 847.057332] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 847.057980] env[69994]: DEBUG nova.virt.hardware [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 847.063112] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Reconfiguring VM instance instance-0000002b to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 847.066353] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8d9236d-3c11-4e43-8664-55c1cf50b4a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.095843] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 847.095843] env[69994]: value = "task-2925540" [ 847.095843] env[69994]: _type = "Task" [ 847.095843] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.110192] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925540, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.161852] env[69994]: DEBUG oslo_concurrency.lockutils [req-3b2dbc3b-d5d0-4653-beaa-6f65337a28a4 req-cbf6d716-ee3e-4ee3-8298-2f1306f580a6 service nova] Releasing lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.236923] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Acquiring lock "refresh_cache-9d146d57-9948-4b18-a3f3-675b53d137ed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.237253] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Acquired lock "refresh_cache-9d146d57-9948-4b18-a3f3-675b53d137ed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.237578] env[69994]: DEBUG nova.network.neutron [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 847.321303] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925535, 'name': Rename_Task, 'duration_secs': 1.131232} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.326851] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 847.327332] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fcd8a71e-4e15-4e4c-8e5c-c837ee2f2781 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.339060] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925533, 'name': Rename_Task, 'duration_secs': 2.02632} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.343284] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 847.343786] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 847.343786] env[69994]: value = "task-2925541" [ 847.343786] env[69994]: _type = "Task" [ 847.343786] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.343979] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b284326-08ef-4bf6-9fce-a0532537c4a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.361837] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 847.361837] env[69994]: value = "task-2925542" [ 847.361837] env[69994]: _type = "Task" [ 847.361837] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.362136] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925541, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.376670] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925542, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.430957] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b3ab5e-89f1-40f0-bf81-bd8e6ec30aee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.438719] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1dc5ff-5be2-4522-92da-b9545d3f36fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.471899] env[69994]: DEBUG nova.network.neutron [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Updated VIF entry in instance network info cache for port 9f4cb414-15f4-4fb9-9ad2-6622cded83db. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 847.472274] env[69994]: DEBUG nova.network.neutron [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Updating instance_info_cache with network_info: [{"id": "9f4cb414-15f4-4fb9-9ad2-6622cded83db", "address": "fa:16:3e:c0:2f:cd", "network": {"id": "a6b07358-99ca-488f-b73e-8f1cbcdfe80d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-754407706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f632b166593c4f6bb1d6e8b795f9e2e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e029825-6c65-4ac7-88f6-65f9d106db76", "external-id": "nsx-vlan-transportzone-428", "segmentation_id": 428, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f4cb414-15", "ovs_interfaceid": "9f4cb414-15f4-4fb9-9ad2-6622cded83db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.473968] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f4a89e-2e69-4963-8569-1cb6830335a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.482418] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d0bfbf-8d25-4669-a754-ab8c8182876b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.497323] env[69994]: DEBUG nova.compute.provider_tree [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.608831] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.609108] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.609324] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.609505] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.609669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.611339] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925540, 'name': ReconfigVM_Task, 'duration_secs': 0.177169} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.611758] env[69994]: INFO nova.compute.manager [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Terminating instance [ 847.613067] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Reconfigured VM instance instance-0000002b to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 847.614660] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5631449f-149d-4b93-8ba2-b2142e4a221d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.638197] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8/f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.638650] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbce7a73-6fee-42a8-b222-6116689ec013 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.656926] env[69994]: DEBUG oslo_vmware.api [None 
req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 847.656926] env[69994]: value = "task-2925543" [ 847.656926] env[69994]: _type = "Task" [ 847.656926] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.665700] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925543, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.736069] env[69994]: DEBUG nova.compute.manager [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Received event network-vif-plugged-7058cb7d-792e-4141-9145-c7cfde6b5700 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 847.736380] env[69994]: DEBUG oslo_concurrency.lockutils [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] Acquiring lock "9d146d57-9948-4b18-a3f3-675b53d137ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.736484] env[69994]: DEBUG oslo_concurrency.lockutils [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] Lock "9d146d57-9948-4b18-a3f3-675b53d137ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.736709] env[69994]: DEBUG oslo_concurrency.lockutils [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] Lock "9d146d57-9948-4b18-a3f3-675b53d137ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.736900] env[69994]: DEBUG nova.compute.manager [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] No waiting events found dispatching network-vif-plugged-7058cb7d-792e-4141-9145-c7cfde6b5700 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 847.737100] env[69994]: WARNING nova.compute.manager [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Received unexpected event network-vif-plugged-7058cb7d-792e-4141-9145-c7cfde6b5700 for instance with vm_state building and task_state spawning. 
[ 847.737281] env[69994]: DEBUG nova.compute.manager [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Received event network-changed-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 847.737438] env[69994]: DEBUG nova.compute.manager [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Refreshing instance network info cache due to event network-changed-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 847.737630] env[69994]: DEBUG oslo_concurrency.lockutils [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] Acquiring lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.737775] env[69994]: DEBUG oslo_concurrency.lockutils [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] Acquired lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.737934] env[69994]: DEBUG nova.network.neutron [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Refreshing network info cache for port 9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.771691] env[69994]: DEBUG nova.network.neutron [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 847.855842] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925541, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.872333] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925542, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.900410] env[69994]: DEBUG nova.network.neutron [-] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.938602] env[69994]: DEBUG nova.network.neutron [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Updating instance_info_cache with network_info: [{"id": "7058cb7d-792e-4141-9145-c7cfde6b5700", "address": "fa:16:3e:05:aa:79", "network": {"id": "29c6908b-b3de-4d58-94c6-407dc40338ac", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-687684932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0aa49f699b774618b7db9991296e8209", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7058cb7d-79", "ovs_interfaceid": "7058cb7d-792e-4141-9145-c7cfde6b5700", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.978232] env[69994]: DEBUG oslo_concurrency.lockutils [req-4eadd670-d077-4d84-8f0b-a49f30c939d4 req-7dcbb490-f1d2-4619-98fb-b577ba20bcb6 service nova] Releasing lock "refresh_cache-bb062ddc-5281-4957-bb9d-8f5c0b0ba526" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.001229] env[69994]: DEBUG nova.scheduler.client.report [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 848.118514] env[69994]: DEBUG nova.compute.manager [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 848.118741] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 848.119741] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295c8789-681d-4ce3-85ad-606df43662b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.128099] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.128336] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8ffc942-28bf-40f8-b574-b233f33e16de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.134504] env[69994]: DEBUG oslo_vmware.api [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 848.134504] env[69994]: value = "task-2925544" [ 848.134504] env[69994]: _type = "Task" [ 848.134504] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.142737] env[69994]: DEBUG oslo_vmware.api [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925544, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.165674] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925543, 'name': ReconfigVM_Task, 'duration_secs': 0.266741} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.165946] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Reconfigured VM instance instance-0000002b to attach disk [datastore2] f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8/f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.166263] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance 'f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 848.356402] env[69994]: DEBUG oslo_vmware.api [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925541, 'name': PowerOnVM_Task, 'duration_secs': 0.549536} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.356744] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 848.356988] env[69994]: INFO nova.compute.manager [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Took 10.56 seconds to spawn the instance on the hypervisor. [ 848.357204] env[69994]: DEBUG nova.compute.manager [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 848.358028] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33be9c27-7107-4956-89c0-3e766eda6759 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.375875] env[69994]: DEBUG oslo_vmware.api [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925542, 'name': PowerOnVM_Task, 'duration_secs': 0.531842} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.376168] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 848.376371] env[69994]: INFO nova.compute.manager [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Took 14.12 seconds to spawn the instance on the hypervisor. [ 848.376622] env[69994]: DEBUG nova.compute.manager [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 848.377535] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f33bbf6-9860-465c-9020-3a043b0dd549 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.403608] env[69994]: INFO nova.compute.manager [-] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Took 1.37 seconds to deallocate network for instance. [ 848.442955] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Releasing lock "refresh_cache-9d146d57-9948-4b18-a3f3-675b53d137ed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.443299] env[69994]: DEBUG nova.compute.manager [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Instance network_info: |[{"id": "7058cb7d-792e-4141-9145-c7cfde6b5700", "address": "fa:16:3e:05:aa:79", "network": {"id": "29c6908b-b3de-4d58-94c6-407dc40338ac", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-687684932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0aa49f699b774618b7db9991296e8209", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7058cb7d-79", "ovs_interfaceid": "7058cb7d-792e-4141-9145-c7cfde6b5700", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 848.443704] env[69994]: DEBUG 
nova.virt.vmwareapi.vmops [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:aa:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7058cb7d-792e-4141-9145-c7cfde6b5700', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 848.451326] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Creating folder: Project (0aa49f699b774618b7db9991296e8209). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 848.451901] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7e5c1cc-b731-4054-903b-24c88647e0ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.457579] env[69994]: DEBUG nova.network.neutron [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Updated VIF entry in instance network info cache for port 9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 848.457918] env[69994]: DEBUG nova.network.neutron [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Updating instance_info_cache with network_info: [{"id": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "address": "fa:16:3e:12:51:b9", "network": {"id": "a6b07358-99ca-488f-b73e-8f1cbcdfe80d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-754407706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f632b166593c4f6bb1d6e8b795f9e2e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e029825-6c65-4ac7-88f6-65f9d106db76", "external-id": "nsx-vlan-transportzone-428", "segmentation_id": 428, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd8099f-e3", "ovs_interfaceid": "9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.464284] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Created folder: Project (0aa49f699b774618b7db9991296e8209) in parent group-v587342. 
[ 848.464284] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Creating folder: Instances. Parent ref: group-v587492. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 848.464284] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1204d5f7-9f85-4514-95f8-e603f898c272 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.472305] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Created folder: Instances in parent group-v587492. [ 848.472532] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 848.472710] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 848.472906] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7145855-cb84-41cf-ba72-055a8df985f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.491258] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 848.491258] env[69994]: value = "task-2925547" [ 848.491258] env[69994]: _type = "Task" [ 848.491258] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.498569] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925547, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.506437] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.665s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.506988] env[69994]: DEBUG nova.compute.manager [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 848.509550] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.300s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.509685] env[69994]: DEBUG nova.objects.instance [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lazy-loading 'resources' on Instance uuid 6aacfc4e-32b4-40d7-8240-e4449cf78925 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 848.645980] env[69994]: DEBUG oslo_vmware.api [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925544, 'name': PowerOffVM_Task, 'duration_secs': 0.21106} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.646228] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 848.646454] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 848.646779] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86e0c37c-b46d-49d8-b5dd-9a26c7388dee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.673517] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee2a18c-d89e-4f80-9c4a-2d7ca9dd0cbb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.694982] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280e84bb-97ac-41ed-858c-c0772bc89b7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.713891] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance 'f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 848.718878] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Unregistered the VM {{(pid=69994) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 848.719138] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 848.719345] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Deleting the datastore file [datastore1] a589ddb9-947b-4ff4-94f6-1fab4bdb874b {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 848.719887] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1fcc3c2-1585-446e-ad24-7318b325d5e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.725879] env[69994]: DEBUG oslo_vmware.api [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for the task: (returnval){ [ 848.725879] env[69994]: value = "task-2925549" [ 848.725879] env[69994]: _type = "Task" [ 848.725879] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.733772] env[69994]: DEBUG oslo_vmware.api [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925549, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.883048] env[69994]: INFO nova.compute.manager [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Took 42.22 seconds to build instance. [ 848.900420] env[69994]: INFO nova.compute.manager [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Took 45.16 seconds to build instance. 
[ 848.910331] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.960301] env[69994]: DEBUG oslo_concurrency.lockutils [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] Releasing lock "refresh_cache-9b6aca3c-337b-4067-80e0-487d956fabc7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.960590] env[69994]: DEBUG nova.compute.manager [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Received event network-changed-7058cb7d-792e-4141-9145-c7cfde6b5700 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 848.960764] env[69994]: DEBUG nova.compute.manager [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Refreshing instance network info cache due to event network-changed-7058cb7d-792e-4141-9145-c7cfde6b5700. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 848.960974] env[69994]: DEBUG oslo_concurrency.lockutils [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] Acquiring lock "refresh_cache-9d146d57-9948-4b18-a3f3-675b53d137ed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.961130] env[69994]: DEBUG oslo_concurrency.lockutils [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] Acquired lock "refresh_cache-9d146d57-9948-4b18-a3f3-675b53d137ed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.961289] env[69994]: DEBUG nova.network.neutron [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Refreshing network info cache for port 7058cb7d-792e-4141-9145-c7cfde6b5700 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 849.001065] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925547, 'name': CreateVM_Task, 'duration_secs': 0.328241} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.001231] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 849.001906] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.002072] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.002383] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 849.003019] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15f350a2-873e-4ee7-9ca8-218732d3d6d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.008121] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 849.008121] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5281c08a-ff13-68f7-c1b8-c371836a9202" [ 849.008121] env[69994]: _type = "Task" [ 849.008121] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.014739] env[69994]: DEBUG nova.compute.utils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 849.017675] env[69994]: DEBUG nova.compute.manager [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 849.017675] env[69994]: DEBUG nova.network.neutron [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 849.023656] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5281c08a-ff13-68f7-c1b8-c371836a9202, 'name': SearchDatastore_Task, 'duration_secs': 0.009644} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.023929] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.024170] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 849.024403] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.024579] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.024778] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 849.025046] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cdc28538-2f56-4355-9db0-3fb9c13dfcf9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.033734] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 849.033834] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 849.034516] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b62487ce-f121-4d45-a6cb-96c9da0750f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.040973] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 849.040973] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c8dfe8-5b9c-983f-bd5b-f5ad01169675" [ 849.040973] env[69994]: _type = "Task" [ 849.040973] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.053645] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c8dfe8-5b9c-983f-bd5b-f5ad01169675, 'name': SearchDatastore_Task, 'duration_secs': 0.008182} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.054456] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbe6548c-ff87-40aa-988b-2bb160fef8c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.060882] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 849.060882] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525d944c-77cf-c917-f1ce-9435b1940299" [ 849.060882] env[69994]: _type = "Task" [ 849.060882] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.067039] env[69994]: DEBUG nova.policy [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c345e7849e994e38b9b5a050255115a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3185ee244cc74a8896b062af9d4e1478', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 849.075511] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525d944c-77cf-c917-f1ce-9435b1940299, 'name': SearchDatastore_Task, 'duration_secs': 0.008529} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.075858] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.076220] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 9d146d57-9948-4b18-a3f3-675b53d137ed/9d146d57-9948-4b18-a3f3-675b53d137ed.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 849.076507] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2119633-a5dd-4ca8-8d0f-fb53941b7cb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.083033] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 849.083033] env[69994]: value = "task-2925550" [ 849.083033] env[69994]: _type = "Task" [ 849.083033] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.097347] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925550, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.242763] env[69994]: DEBUG oslo_vmware.api [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Task: {'id': task-2925549, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139942} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.243633] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 849.244027] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 849.244124] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 849.244370] env[69994]: INFO nova.compute.manager [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 849.244676] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 849.244897] env[69994]: DEBUG nova.compute.manager [-] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 849.244997] env[69994]: DEBUG nova.network.neutron [-] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 849.276720] env[69994]: DEBUG nova.network.neutron [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Port 885142d2-3a31-487c-b773-a0b0df2e4e40 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 849.386967] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26ffe7ad-65cc-4dac-ba3f-aea5d1e294b6 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "b4c6b628-426e-4efc-b8b6-0c2937ef6df3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.043s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.404371] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82822379-46b3-4096-aaa2-10ea2edaf090 tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "234c2683-80f3-4f29-bcc9-9853338128bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.267s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.520109] env[69994]: DEBUG nova.compute.manager [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 849.543775] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520ad014-db0b-4c98-a8b2-8dfb42b5b945 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.553807] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1876fbc-4b20-4117-aea0-2a9b0bafa6e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.592285] env[69994]: DEBUG nova.network.neutron [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Successfully created port: 429b3de3-f864-4315-a305-15e8cd0c31fb {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 849.597236] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7004146a-2aa4-4959-9147-5a3dfc2ada29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.606905] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925550, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450818} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.609094] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 9d146d57-9948-4b18-a3f3-675b53d137ed/9d146d57-9948-4b18-a3f3-675b53d137ed.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 849.609383] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 849.610636] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-abf56be0-aeff-46ae-ba26-6342f182c49c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.613114] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f0923b-17a8-4329-8c9b-24ff8ac5731e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.631559] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 849.631559] env[69994]: value = "task-2925551" [ 849.631559] env[69994]: _type = "Task" [ 849.631559] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.632031] env[69994]: DEBUG nova.compute.provider_tree [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 849.641363] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925551, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.741136] env[69994]: DEBUG nova.compute.manager [req-1a490afc-f592-4c38-bdee-cbf095b67d09 req-05842326-fb8e-4cd4-a0da-4e4cba7dbd9c service nova] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Received event network-vif-deleted-77322171-a52e-49ee-a04c-5aecaebff021 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 849.741291] env[69994]: INFO nova.compute.manager [req-1a490afc-f592-4c38-bdee-cbf095b67d09 req-05842326-fb8e-4cd4-a0da-4e4cba7dbd9c service nova] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Neutron deleted interface 77322171-a52e-49ee-a04c-5aecaebff021; detaching it from the instance and deleting it from the info cache [ 849.741419] env[69994]: DEBUG nova.network.neutron [req-1a490afc-f592-4c38-bdee-cbf095b67d09 req-05842326-fb8e-4cd4-a0da-4e4cba7dbd9c service nova] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.767676] env[69994]: DEBUG nova.compute.manager [req-66d2317b-06f6-4e0c-8097-73354a6a6293 req-803787a4-88a9-4394-8ac9-8880775b424e service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Received event network-changed-a9985d6b-dfb2-4569-99f9-a42c283e7cd1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 849.767874] env[69994]: DEBUG nova.compute.manager [req-66d2317b-06f6-4e0c-8097-73354a6a6293 req-803787a4-88a9-4394-8ac9-8880775b424e service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Refreshing instance network info cache due to event network-changed-a9985d6b-dfb2-4569-99f9-a42c283e7cd1. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 849.768284] env[69994]: DEBUG oslo_concurrency.lockutils [req-66d2317b-06f6-4e0c-8097-73354a6a6293 req-803787a4-88a9-4394-8ac9-8880775b424e service nova] Acquiring lock "refresh_cache-234c2683-80f3-4f29-bcc9-9853338128bd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.768468] env[69994]: DEBUG oslo_concurrency.lockutils [req-66d2317b-06f6-4e0c-8097-73354a6a6293 req-803787a4-88a9-4394-8ac9-8880775b424e service nova] Acquired lock "refresh_cache-234c2683-80f3-4f29-bcc9-9853338128bd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.768638] env[69994]: DEBUG nova.network.neutron [req-66d2317b-06f6-4e0c-8097-73354a6a6293 req-803787a4-88a9-4394-8ac9-8880775b424e service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Refreshing network info cache for port a9985d6b-dfb2-4569-99f9-a42c283e7cd1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 849.801888] env[69994]: DEBUG nova.network.neutron [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Updated VIF entry in instance network info cache for port 7058cb7d-792e-4141-9145-c7cfde6b5700. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 849.802705] env[69994]: DEBUG nova.network.neutron [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Updating instance_info_cache with network_info: [{"id": "7058cb7d-792e-4141-9145-c7cfde6b5700", "address": "fa:16:3e:05:aa:79", "network": {"id": "29c6908b-b3de-4d58-94c6-407dc40338ac", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-687684932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0aa49f699b774618b7db9991296e8209", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7058cb7d-79", "ovs_interfaceid": "7058cb7d-792e-4141-9145-c7cfde6b5700", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.948937] env[69994]: DEBUG nova.compute.manager [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 849.949877] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b018099-b97f-4603-90ed-03aa4061ab94 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.145542] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925551, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.306374} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.145839] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 850.146703] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919f5baf-d3d9-4c3b-82c0-c2ec5b0e194c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.168840] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 9d146d57-9948-4b18-a3f3-675b53d137ed/9d146d57-9948-4b18-a3f3-675b53d137ed.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 850.169964] env[69994]: ERROR nova.scheduler.client.report [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] [req-875be734-03d0-4885-a538-de38538af4e3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-875be734-03d0-4885-a538-de38538af4e3"}]} [ 850.170344] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-369a83a1-bfb2-4477-a907-144951f19b6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.193451] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 850.193451] env[69994]: value = "task-2925552" [ 850.193451] env[69994]: _type = "Task" [ 850.193451] env[69994]: } to complete. 
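Editor's note: the 409 `placement.concurrent_update` error above is placement's optimistic-concurrency check. Every inventory PUT carries the `resource_provider_generation` the client last saw, and placement rejects the write if another writer bumped the generation first; the report client then refreshes the provider and retries, which is exactly what the following "Refreshing inventories" entries show. A rough sketch of that interaction at the REST level; the endpoint and token are placeholders, and the real client goes through keystoneauth rather than raw requests.

```python
# Sketch of the placement generation-conflict retry seen above.
import requests

PLACEMENT = 'http://placement.example.org'          # placeholder
HEADERS = {'X-Auth-Token': 'admin-token',           # placeholder
           'OpenStack-API-Version': 'placement 1.26'}

def set_inventory(rp_uuid, inventories):
    for _ in range(3):                               # bounded retries
        rp = requests.get(f'{PLACEMENT}/resource_providers/{rp_uuid}',
                          headers=HEADERS).json()
        payload = {'resource_provider_generation': rp['generation'],
                   'inventories': inventories}
        resp = requests.put(
            f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories',
            json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 concurrent_update: someone else bumped the generation;
        # loop, re-read the provider, and retry with the new value.
    raise RuntimeError('gave up after repeated generation conflicts')
```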
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.200815] env[69994]: DEBUG nova.scheduler.client.report [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 850.207358] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925552, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.217393] env[69994]: DEBUG nova.scheduler.client.report [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 850.217632] env[69994]: DEBUG nova.compute.provider_tree [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 850.224220] env[69994]: DEBUG nova.network.neutron [-] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.230195] env[69994]: DEBUG nova.scheduler.client.report [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 850.243760] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2ed549f2-01d2-4883-b300-99d5c912149f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.249300] env[69994]: DEBUG nova.scheduler.client.report [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Refreshing 
trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 850.255335] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18601b75-d569-4879-811a-7bc920a908fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.295489] env[69994]: DEBUG nova.compute.manager [req-1a490afc-f592-4c38-bdee-cbf095b67d09 req-05842326-fb8e-4cd4-a0da-4e4cba7dbd9c service nova] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Detach interface failed, port_id=77322171-a52e-49ee-a04c-5aecaebff021, reason: Instance a589ddb9-947b-4ff4-94f6-1fab4bdb874b could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 850.304187] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.304411] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.304588] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.307509] env[69994]: DEBUG oslo_concurrency.lockutils [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] Releasing lock "refresh_cache-9d146d57-9948-4b18-a3f3-675b53d137ed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.307509] env[69994]: DEBUG nova.compute.manager [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Received event network-vif-deleted-9f4cb414-15f4-4fb9-9ad2-6622cded83db {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 850.307890] env[69994]: INFO nova.compute.manager [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Neutron deleted interface 9f4cb414-15f4-4fb9-9ad2-6622cded83db; detaching it from the instance and deleting it from the info cache [ 850.308105] env[69994]: DEBUG nova.network.neutron [req-a591fcab-ac44-4839-9b12-d1f5abc746cd 
req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.461569] env[69994]: INFO nova.compute.manager [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] instance snapshotting [ 850.464305] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7acb92f-dad8-4542-b7b1-b3056caf6477 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.488867] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ad6822-4c43-430c-80fe-1ec645e38561 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.536412] env[69994]: DEBUG nova.compute.manager [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 850.563394] env[69994]: DEBUG nova.virt.hardware [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 850.563643] env[69994]: DEBUG nova.virt.hardware [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 850.563801] env[69994]: DEBUG nova.virt.hardware [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 850.563987] env[69994]: DEBUG nova.virt.hardware [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 850.564146] env[69994]: DEBUG nova.virt.hardware [None req-38295e53-4507-48c1-b365-799d186eb41d 
tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 850.564294] env[69994]: DEBUG nova.virt.hardware [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 850.564562] env[69994]: DEBUG nova.virt.hardware [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 850.564745] env[69994]: DEBUG nova.virt.hardware [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 850.564914] env[69994]: DEBUG nova.virt.hardware [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 850.565578] env[69994]: DEBUG nova.virt.hardware [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 850.565805] env[69994]: DEBUG nova.virt.hardware [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 850.566995] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f7237b-334c-41b8-bc3e-f9852d469cbe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.574143] env[69994]: DEBUG nova.network.neutron [req-66d2317b-06f6-4e0c-8097-73354a6a6293 req-803787a4-88a9-4394-8ac9-8880775b424e service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Updated VIF entry in instance network info cache for port a9985d6b-dfb2-4569-99f9-a42c283e7cd1. 
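Editor's note: the nova.virt.hardware trace a few entries above ("Build topologies for 1 vcpu(s) 1:1:1" through "Sorted desired topologies") enumerates the (sockets, cores, threads) combinations that multiply out to the flavor's vCPU count within the flavor/image maxima. The sketch below mirrors that idea only; it is not nova's implementation, and the 65536 defaults are the "no limit" values printed in the log.

```python
# Hedged illustration of the topology enumeration traced above.
from collections import namedtuple

Topology = namedtuple('Topology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for threads in range(1, min(max_threads, vcpus) + 1):
        if vcpus % threads:
            continue
        for cores in range(1, min(max_cores, vcpus // threads) + 1):
            if (vcpus // threads) % cores:
                continue
            sockets = vcpus // threads // cores
            if sockets <= max_sockets:
                topologies.append(Topology(sockets, cores, threads))
    return topologies

# For the 1-vCPU m1.nano flavor above this yields exactly one topology:
# [Topology(sockets=1, cores=1, threads=1)], matching "Got 1 possible
# topologies" in the log.
print(possible_topologies(1))
```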
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 850.574933] env[69994]: DEBUG nova.network.neutron [req-66d2317b-06f6-4e0c-8097-73354a6a6293 req-803787a4-88a9-4394-8ac9-8880775b424e service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Updating instance_info_cache with network_info: [{"id": "a9985d6b-dfb2-4569-99f9-a42c283e7cd1", "address": "fa:16:3e:d6:8b:50", "network": {"id": "596cb0a2-fc6e-400a-89ef-dcae93a7ee7b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1499088312-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd7386da3414f198142cee5c6d383b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f44b2fa3-6730-4b87-8839-947eff21213f", "external-id": "nsx-vlan-transportzone-984", "segmentation_id": 984, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9985d6b-df", "ovs_interfaceid": "a9985d6b-dfb2-4569-99f9-a42c283e7cd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.579521] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f542a9-206c-4ba3-b259-7178751d51ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.693198] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542132e8-c00a-4e27-925f-ec65ddc56cef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.709933] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925552, 'name': ReconfigVM_Task, 'duration_secs': 0.310413} completed successfully. 
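Editor's note: the `update_instance_cache_with_nw_info` entries print the full network_info model per VIF (port id, MAC, subnets, fixed and floating IPs, binding details). A small illustrative helper for reading blobs of that shape is sketched below; the sample dict is trimmed to the fields used but its values are taken from the cache entry above, and the helper itself is not part of nova.

```python
# Illustrative reader for network_info blobs like the ones logged above.
def summarize_network_info(network_info):
    rows = []
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                rows.append({
                    'port': vif['id'],
                    'mac': vif['address'],
                    'fixed_ip': ip['address'],
                    'floating_ips': [f['address']
                                     for f in ip.get('floating_ips', [])],
                })
    return rows

sample = [{'id': 'a9985d6b-dfb2-4569-99f9-a42c283e7cd1',
           'address': 'fa:16:3e:d6:8b:50',
           'network': {'subnets': [{'ips': [
               {'address': '192.168.128.8',
                'floating_ips': [{'address': '10.180.180.201'}]}]}]}}]
print(summarize_network_info(sample))
```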
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.711895] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 9d146d57-9948-4b18-a3f3-675b53d137ed/9d146d57-9948-4b18-a3f3-675b53d137ed.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 850.712589] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ec7aadf-41c3-401d-a501-4b837d7be78c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.714875] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133fcff3-a4b4-4fd0-b334-88dcb53d3bb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.746926] env[69994]: INFO nova.compute.manager [-] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Took 1.50 seconds to deallocate network for instance. [ 850.750711] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c7fd9b-b5e2-425d-b7e8-60e62785faaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.753725] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 850.753725] env[69994]: value = "task-2925553" [ 850.753725] env[69994]: _type = "Task" [ 850.753725] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.763826] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d618a7d8-9c02-4b96-9889-6550f6de3e87 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.771109] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925553, 'name': Rename_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.781008] env[69994]: DEBUG nova.compute.provider_tree [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 850.811373] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc70ff1c-3742-4306-910e-7817010005f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.825328] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d38535-5da7-4204-904b-e69a6a52bf24 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.870017] env[69994]: DEBUG nova.compute.manager [req-a591fcab-ac44-4839-9b12-d1f5abc746cd req-85cd41fc-f92d-4a28-8197-4b4373838ecf service nova] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Detach interface failed, port_id=9f4cb414-15f4-4fb9-9ad2-6622cded83db, reason: Instance bb062ddc-5281-4957-bb9d-8f5c0b0ba526 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 851.007118] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 851.007468] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1812e546-5c68-46c8-a4b6-db1dcce2cea5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.017976] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 851.017976] env[69994]: value = "task-2925554" [ 851.017976] env[69994]: _type = "Task" [ 851.017976] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.026737] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925554, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.079840] env[69994]: DEBUG oslo_concurrency.lockutils [req-66d2317b-06f6-4e0c-8097-73354a6a6293 req-803787a4-88a9-4394-8ac9-8880775b424e service nova] Releasing lock "refresh_cache-234c2683-80f3-4f29-bcc9-9853338128bd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.266936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.267288] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925553, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.325717] env[69994]: DEBUG nova.scheduler.client.report [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 75 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 851.325984] env[69994]: DEBUG nova.compute.provider_tree [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 75 to 76 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 851.326181] env[69994]: DEBUG nova.compute.provider_tree [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 851.374516] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.374712] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 851.374890] env[69994]: DEBUG nova.network.neutron [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 851.439854] env[69994]: DEBUG nova.network.neutron [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Successfully updated port: 429b3de3-f864-4315-a305-15e8cd0c31fb {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 851.526180] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925554, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.767175] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925553, 'name': Rename_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.769238] env[69994]: DEBUG nova.compute.manager [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Received event network-vif-plugged-429b3de3-f864-4315-a305-15e8cd0c31fb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 851.769445] env[69994]: DEBUG oslo_concurrency.lockutils [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] Acquiring lock "9ce0d8da-2366-469a-82cf-f2dcd4c7e44f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.769653] env[69994]: DEBUG oslo_concurrency.lockutils [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] Lock "9ce0d8da-2366-469a-82cf-f2dcd4c7e44f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.769818] env[69994]: DEBUG oslo_concurrency.lockutils [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] Lock "9ce0d8da-2366-469a-82cf-f2dcd4c7e44f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.769987] env[69994]: DEBUG nova.compute.manager [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] No waiting events found dispatching network-vif-plugged-429b3de3-f864-4315-a305-15e8cd0c31fb {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 851.770194] env[69994]: WARNING nova.compute.manager [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Received unexpected event network-vif-plugged-429b3de3-f864-4315-a305-15e8cd0c31fb for instance with vm_state building and task_state spawning. [ 851.770357] env[69994]: DEBUG nova.compute.manager [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Received event network-changed-429b3de3-f864-4315-a305-15e8cd0c31fb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 851.770510] env[69994]: DEBUG nova.compute.manager [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Refreshing instance network info cache due to event network-changed-429b3de3-f864-4315-a305-15e8cd0c31fb. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 851.770717] env[69994]: DEBUG oslo_concurrency.lockutils [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] Acquiring lock "refresh_cache-9ce0d8da-2366-469a-82cf-f2dcd4c7e44f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.770889] env[69994]: DEBUG oslo_concurrency.lockutils [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] Acquired lock "refresh_cache-9ce0d8da-2366-469a-82cf-f2dcd4c7e44f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 851.771077] env[69994]: DEBUG nova.network.neutron [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Refreshing network info cache for port 429b3de3-f864-4315-a305-15e8cd0c31fb {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 851.831493] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.322s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.833931] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.117s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.834183] env[69994]: DEBUG nova.objects.instance [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Lazy-loading 'resources' on Instance uuid 3c814c83-20cc-4871-9f30-5c0c7d99b8a1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 851.853113] env[69994]: INFO nova.scheduler.client.report [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Deleted allocations for instance 6aacfc4e-32b4-40d7-8240-e4449cf78925 [ 851.942762] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "refresh_cache-9ce0d8da-2366-469a-82cf-f2dcd4c7e44f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.028467] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925554, 'name': CreateSnapshot_Task} progress is 100%. 
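Editor's note: the repeated "Acquiring lock / acquired / released" triplets (the per-instance `*-events` lock above, `compute_resources`, the `refresh_cache-<uuid>` locks) are emitted by oslo.concurrency's lockutils. Both usages below are real lockutils APIs, shown here only to illustrate the primitive nova wraps; the lock names are examples taken from the log, and the function body is a placeholder.

```python
# Minimal sketch of the oslo.concurrency locking behind the log lines.
from oslo_concurrency import lockutils

# Decorator form: one named semaphore guards every call.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # placeholder body

# Context-manager form, as used for per-instance cache refreshes.
with lockutils.lock('refresh_cache-9ce0d8da-2366-469a-82cf-f2dcd4c7e44f'):
    update_usage()
```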
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.171418] env[69994]: DEBUG nova.network.neutron [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance_info_cache with network_info: [{"id": "885142d2-3a31-487c-b773-a0b0df2e4e40", "address": "fa:16:3e:76:b6:04", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap885142d2-3a", "ovs_interfaceid": "885142d2-3a31-487c-b773-a0b0df2e4e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.267678] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925553, 'name': Rename_Task, 'duration_secs': 1.141289} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.268026] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 852.268289] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d910c48e-050f-4ab8-9073-17f5b6064c16 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.276233] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 852.276233] env[69994]: value = "task-2925555" [ 852.276233] env[69994]: _type = "Task" [ 852.276233] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.283989] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925555, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.302517] env[69994]: DEBUG nova.network.neutron [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 852.363437] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f2e756e3-db12-4324-b531-6a1ea0143627 tempest-ServersAdminNegativeTestJSON-371890694 tempest-ServersAdminNegativeTestJSON-371890694-project-member] Lock "6aacfc4e-32b4-40d7-8240-e4449cf78925" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.334s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.387737] env[69994]: DEBUG nova.network.neutron [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.529868] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925554, 'name': CreateSnapshot_Task, 'duration_secs': 1.489533} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.530170] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 852.531286] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2caef25-5840-48fb-9919-4a9a899140c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.674859] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.722141] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ab8eac-f68f-4b86-a7e2-532ad1a00a85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.730934] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef98d023-d7ee-4993-a9ef-75f65f178765 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.762410] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a594a6a-8826-48e9-b7da-de259dd78f68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.770012] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dce92894-02c4-4c8d-8f75-103905257df6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.788603] env[69994]: DEBUG nova.compute.provider_tree [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.794970] env[69994]: DEBUG oslo_vmware.api [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925555, 'name': PowerOnVM_Task, 'duration_secs': 0.438847} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.795320] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 852.795541] env[69994]: INFO nova.compute.manager [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Took 7.07 seconds to spawn the instance on the hypervisor. [ 852.795758] env[69994]: DEBUG nova.compute.manager [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 852.797034] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f05d0bb-274a-41be-8714-ed43a44cd204 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.893450] env[69994]: DEBUG oslo_concurrency.lockutils [req-7531fa98-ec3b-4fd4-b9a4-14f16a0a859b req-ba37b2f6-ff8b-4310-bc93-a56ca34259d6 service nova] Releasing lock "refresh_cache-9ce0d8da-2366-469a-82cf-f2dcd4c7e44f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.893921] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquired lock "refresh_cache-9ce0d8da-2366-469a-82cf-f2dcd4c7e44f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.894169] env[69994]: DEBUG nova.network.neutron [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 853.051633] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 
b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 853.051963] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-defc5024-dd58-4a06-998f-d773c1ebe4fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.060739] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 853.060739] env[69994]: value = "task-2925556" [ 853.060739] env[69994]: _type = "Task" [ 853.060739] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.068969] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925556, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.197164] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9866159a-9bff-41b6-acd4-de824481c731 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.216911] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1733b9-1ade-4bc5-a401-c1357371ca3c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.224094] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance 'f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 853.292149] env[69994]: DEBUG nova.scheduler.client.report [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 853.313999] env[69994]: INFO nova.compute.manager [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Took 43.54 seconds to build instance. [ 853.461482] env[69994]: DEBUG nova.network.neutron [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Instance cache missing network info. 
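Editor's note: the vmops entries above show the image-snapshot flow for instance b4c6b628: CreateSnapshot_Task first, then CloneVM_Task producing a linked clone that shares the snapshot's disks. The sketch below is a very rough rendering of that sequence under assumed oslo.vmware conventions, not nova's exact code; `vm_ref`, `rp_ref` and `folder_ref` are placeholder managed-object references.

```python
# Rough sketch of snapshot + linked clone as described in the log above.
def snapshot_and_linked_clone(session, vm_ref, rp_ref, folder_ref, name):
    # 1. Snapshot the running instance (no memory dump, no quiesce).
    snap_task = session.invoke_api(
        session.vim, 'CreateSnapshot_Task', vm_ref,
        name='%s-snap' % name, description='image upload snapshot',
        memory=False, quiesce=False)
    snap_ref = session.wait_for_task(snap_task).result  # snapshot moref

    # 2. Clone from that snapshot; child disk backing gives the
    #    "linked-clone" behaviour noted in the log.
    cf = session.vim.client.factory
    rel_spec = cf.create('ns0:VirtualMachineRelocateSpec')
    rel_spec.pool = rp_ref
    rel_spec.diskMoveType = 'createNewChildDiskBacking'
    clone_spec = cf.create('ns0:VirtualMachineCloneSpec')
    clone_spec.location = rel_spec
    clone_spec.snapshot = snap_ref
    clone_spec.powerOn = False
    clone_spec.template = True
    clone_task = session.invoke_api(
        session.vim, 'CloneVM_Task', vm_ref,
        folder=folder_ref, name='%s-clone' % name, spec=clone_spec)
    return session.wait_for_task(clone_task)
```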
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.572402] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925556, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.608891] env[69994]: DEBUG nova.network.neutron [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Updating instance_info_cache with network_info: [{"id": "429b3de3-f864-4315-a305-15e8cd0c31fb", "address": "fa:16:3e:7a:a6:9e", "network": {"id": "8f28d0df-43ec-4e28-b4e5-7007b665b70f", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1881204323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3185ee244cc74a8896b062af9d4e1478", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap429b3de3-f8", "ovs_interfaceid": "429b3de3-f864-4315-a305-15e8cd0c31fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.732392] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 853.732392] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45de7ee5-14da-4676-a093-1f5c443e27c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.740331] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 853.740331] env[69994]: value = "task-2925557" [ 853.740331] env[69994]: _type = "Task" [ 853.740331] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.750465] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925557, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.797253] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.963s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.799951] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.295s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.801823] env[69994]: INFO nova.compute.claims [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 853.815980] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3ab71d-2142-4ac2-a38b-14759262c45b tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Lock "9d146d57-9948-4b18-a3f3-675b53d137ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.976s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.821784] env[69994]: INFO nova.scheduler.client.report [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Deleted allocations for instance 3c814c83-20cc-4871-9f30-5c0c7d99b8a1 [ 854.073858] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925556, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.111629] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Releasing lock "refresh_cache-9ce0d8da-2366-469a-82cf-f2dcd4c7e44f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.112112] env[69994]: DEBUG nova.compute.manager [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Instance network_info: |[{"id": "429b3de3-f864-4315-a305-15e8cd0c31fb", "address": "fa:16:3e:7a:a6:9e", "network": {"id": "8f28d0df-43ec-4e28-b4e5-7007b665b70f", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1881204323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3185ee244cc74a8896b062af9d4e1478", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap429b3de3-f8", "ovs_interfaceid": "429b3de3-f864-4315-a305-15e8cd0c31fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 854.112601] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:a6:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '429b3de3-f864-4315-a305-15e8cd0c31fb', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 854.120844] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 854.121020] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 854.121263] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03a02b33-9d28-4fea-a944-5dbcfb96e030 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.150316] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 854.150316] env[69994]: value = "task-2925558" [ 854.150316] env[69994]: _type = "Task" [ 854.150316] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.161813] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925558, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.255510] env[69994]: DEBUG oslo_vmware.api [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925557, 'name': PowerOnVM_Task, 'duration_secs': 0.466471} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.255826] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 854.255961] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5935877f-6da0-4390-9184-8bf27acd5544 tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance 'f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 854.334062] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a416ab11-a26f-4526-b575-8e41b0593e76 tempest-ServerMetadataNegativeTestJSON-1746799822 tempest-ServerMetadataNegativeTestJSON-1746799822-project-member] Lock "3c814c83-20cc-4871-9f30-5c0c7d99b8a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.885s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.575106] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925556, 'name': CloneVM_Task, 'duration_secs': 1.210077} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.575429] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Created linked-clone VM from snapshot [ 854.576272] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f45fb7-1ba4-466e-ab92-c5341624aac7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.586809] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Uploading image 136e080b-9934-48bc-87a1-4505254582a3 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 854.613848] env[69994]: DEBUG oslo_vmware.rw_handles [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 854.613848] env[69994]: value = "vm-587496" [ 854.613848] env[69994]: _type = "VirtualMachine" [ 854.613848] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 854.614148] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-123f5cfa-eabf-42ee-8d2f-7b550288e69b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.621167] env[69994]: DEBUG oslo_vmware.rw_handles [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lease: (returnval){ [ 854.621167] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523801e9-7cb6-0a1d-a267-99eef3a8db67" [ 854.621167] env[69994]: _type = "HttpNfcLease" [ 854.621167] env[69994]: } obtained for exporting VM: (result){ [ 854.621167] env[69994]: value = "vm-587496" [ 854.621167] env[69994]: _type = "VirtualMachine" [ 854.621167] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 854.621525] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the lease: (returnval){ [ 854.621525] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523801e9-7cb6-0a1d-a267-99eef3a8db67" [ 854.621525] env[69994]: _type = "HttpNfcLease" [ 854.621525] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 854.627351] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 854.627351] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523801e9-7cb6-0a1d-a267-99eef3a8db67" [ 854.627351] env[69994]: _type = "HttpNfcLease" [ 854.627351] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 854.660101] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925558, 'name': CreateVM_Task, 'duration_secs': 0.445911} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.660101] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 854.661418] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.661418] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.661418] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 854.661418] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aaee9ac-257b-4d28-93ff-27bfe02135a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.666638] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 854.666638] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5237fafa-4897-b18a-0be3-5d902ad6f289" [ 854.666638] env[69994]: _type = "Task" [ 854.666638] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.674119] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5237fafa-4897-b18a-0be3-5d902ad6f289, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.134198] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 855.134198] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523801e9-7cb6-0a1d-a267-99eef3a8db67" [ 855.134198] env[69994]: _type = "HttpNfcLease" [ 855.134198] env[69994]: } is ready. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 855.134198] env[69994]: DEBUG oslo_vmware.rw_handles [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 855.134198] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523801e9-7cb6-0a1d-a267-99eef3a8db67" [ 855.134198] env[69994]: _type = "HttpNfcLease" [ 855.134198] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 855.135096] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c31a593-9f9c-401e-a2e2-adc181cda446 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.145735] env[69994]: DEBUG oslo_vmware.rw_handles [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aa2b3e-d967-82c1-8080-027c12199fe8/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 855.146033] env[69994]: DEBUG oslo_vmware.rw_handles [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aa2b3e-d967-82c1-8080-027c12199fe8/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 855.221682] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5237fafa-4897-b18a-0be3-5d902ad6f289, 'name': SearchDatastore_Task, 'duration_secs': 0.041987} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.222478] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.222772] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.223083] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.223520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.223598] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.223901] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbdaca91-0d5b-4fa2-a150-5ae58f71d20a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.227146] env[69994]: DEBUG nova.compute.manager [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 855.231334] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908853ee-4f19-4d7c-8a9d-800a7a171ed9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.243411] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.243665] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 
tempest-VolumesAdminNegativeTest-1005914206-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.244684] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a518060-a78b-4891-b9e1-20defb9e9a7e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.250369] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 855.250369] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d1d9d9-0ba9-fff4-561b-b18a1268e21d" [ 855.250369] env[69994]: _type = "Task" [ 855.250369] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.261986] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d1d9d9-0ba9-fff4-561b-b18a1268e21d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.271274] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-50097b36-9310-4e6c-b976-f2f366a2517b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.302688] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3981a7a-3848-4331-b217-23441ad2caaa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.314698] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776ccdc1-66b0-432e-8280-8efd59e26c71 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.354036] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a925b82b-9563-4e03-a8e5-68d0866c7c0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.363422] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94423950-d94e-4f98-b2d4-2775a505f446 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.380601] env[69994]: DEBUG nova.compute.provider_tree [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.746061] env[69994]: INFO nova.compute.manager [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] instance snapshotting [ 855.750138] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7572308-d991-4824-b2ed-84b36d2cfc1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.793075] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b314a1-3111-4717-aae3-ebb62c3ff49e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.813579] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d1d9d9-0ba9-fff4-561b-b18a1268e21d, 'name': SearchDatastore_Task, 'duration_secs': 0.010157} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.816814] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-411226cd-dc91-40d1-a204-a35ff22ba707 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.831790] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 855.831790] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52668802-b100-80c0-a993-2f60de423d41" [ 855.831790] env[69994]: _type = "Task" [ 855.831790] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.844260] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52668802-b100-80c0-a993-2f60de423d41, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.887582] env[69994]: DEBUG nova.scheduler.client.report [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 856.332700] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 856.333892] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-664c42a0-886c-415d-a66e-806140be32e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.351994] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52668802-b100-80c0-a993-2f60de423d41, 'name': SearchDatastore_Task, 'duration_secs': 0.018868} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.354126] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.354570] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f/9ce0d8da-2366-469a-82cf-f2dcd4c7e44f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 856.355755] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 856.355755] env[69994]: value = "task-2925560" [ 856.355755] env[69994]: _type = "Task" [ 856.355755] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.355755] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00d87209-608f-4d6c-8cef-e36eec8aaf8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.371669] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925560, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.373682] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 856.373682] env[69994]: value = "task-2925561" [ 856.373682] env[69994]: _type = "Task" [ 856.373682] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.385769] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925561, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.397070] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.399857] env[69994]: DEBUG nova.compute.manager [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 856.408692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.897s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.409107] env[69994]: DEBUG nova.objects.instance [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lazy-loading 'resources' on Instance uuid 9269e42b-b05c-4c88-9008-aaeda4b0248f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 856.499612] env[69994]: DEBUG nova.network.neutron [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Port 885142d2-3a31-487c-b773-a0b0df2e4e40 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 856.499951] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.500175] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.500401] env[69994]: DEBUG nova.network.neutron [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 856.873060] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925560, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.886979] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925561, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480261} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.887270] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f/9ce0d8da-2366-469a-82cf-f2dcd4c7e44f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 856.887493] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 856.887773] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04c71aac-3e74-465f-9a90-18ac40178d5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.894635] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 856.894635] env[69994]: value = "task-2925562" [ 856.894635] env[69994]: _type = "Task" [ 856.894635] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.905457] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925562, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.910780] env[69994]: DEBUG nova.compute.utils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 856.912415] env[69994]: DEBUG nova.compute.manager [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 856.912599] env[69994]: DEBUG nova.network.neutron [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 857.012909] env[69994]: DEBUG nova.policy [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7be902c21aba40e1ac159ffa787eea04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d72179a46b64984b9ef219161bfcd76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 857.374426] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925560, 'name': CreateSnapshot_Task, 'duration_secs': 0.713406} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.375032] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 857.375994] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6af70f0-2247-4d43-9774-299e6f47aed0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.383416] env[69994]: DEBUG nova.network.neutron [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance_info_cache with network_info: [{"id": "885142d2-3a31-487c-b773-a0b0df2e4e40", "address": "fa:16:3e:76:b6:04", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap885142d2-3a", "ovs_interfaceid": "885142d2-3a31-487c-b773-a0b0df2e4e40", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.411940] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925562, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124834} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.413708] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 857.414919] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8968ca6-e016-4518-98ed-66ad17749855 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.434778] env[69994]: DEBUG nova.compute.manager [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 857.448088] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f/9ce0d8da-2366-469a-82cf-f2dcd4c7e44f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 857.451983] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01ddc0d9-e2ea-45f3-8992-d2f038ddf66a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.478818] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 857.478818] env[69994]: value = "task-2925563" [ 857.478818] env[69994]: _type = "Task" [ 857.478818] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.485509] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925563, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.509173] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c629879-fb36-438f-b53a-828f0858e4e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.517435] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61e5ecc-886c-446f-95f4-32bdcba83ed4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.554137] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167f09e7-3d43-4495-a9cf-e7f3be31d0c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.563690] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc0081a-931e-4a08-a6ad-43a164d6bbd9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.581753] env[69994]: DEBUG nova.compute.provider_tree [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.671555] env[69994]: DEBUG nova.network.neutron [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Successfully created port: 381f1b5f-fbf6-499e-afb0-d63ec11e7e21 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 857.890597] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.918032] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 857.918032] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-58563e63-9ba4-4b1f-8c0e-400207e58147 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.923792] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 857.923792] env[69994]: value = "task-2925564" [ 857.923792] env[69994]: _type = "Task" [ 857.923792] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.937815] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925564, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.985951] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925563, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.086545] env[69994]: DEBUG nova.scheduler.client.report [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 858.395565] env[69994]: DEBUG nova.compute.manager [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69994) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 858.396396] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.439946] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925564, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.458024] env[69994]: DEBUG nova.compute.manager [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 858.490960] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925563, 'name': ReconfigVM_Task, 'duration_secs': 0.704912} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.491461] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f/9ce0d8da-2366-469a-82cf-f2dcd4c7e44f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.494776] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa8ed6fa-dc17-4b77-a887-ced71c7039ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.504443] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 858.504443] env[69994]: value = "task-2925565" [ 858.504443] env[69994]: _type = "Task" [ 858.504443] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.513963] env[69994]: DEBUG nova.virt.hardware [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 858.514348] env[69994]: DEBUG nova.virt.hardware [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 858.514580] env[69994]: DEBUG nova.virt.hardware [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 858.515168] env[69994]: DEBUG nova.virt.hardware [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 858.515425] env[69994]: DEBUG nova.virt.hardware [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 
tempest-AttachVolumeNegativeTest-1428794158-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 858.515665] env[69994]: DEBUG nova.virt.hardware [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 858.515979] env[69994]: DEBUG nova.virt.hardware [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 858.516267] env[69994]: DEBUG nova.virt.hardware [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 858.516645] env[69994]: DEBUG nova.virt.hardware [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 858.516945] env[69994]: DEBUG nova.virt.hardware [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 858.517237] env[69994]: DEBUG nova.virt.hardware [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 858.518656] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03f814c-9895-425f-86b4-d951cb449100 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.534031] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925565, 'name': Rename_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.540786] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9e4130-4b58-4b6e-a7af-c3f434b28862 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.594915] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.187s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.598871] env[69994]: DEBUG oslo_concurrency.lockutils [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.004s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.598871] env[69994]: DEBUG nova.objects.instance [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 858.626980] env[69994]: INFO nova.scheduler.client.report [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Deleted allocations for instance 9269e42b-b05c-4c88-9008-aaeda4b0248f [ 858.939703] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925564, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.014606] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925565, 'name': Rename_Task, 'duration_secs': 0.214098} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.015345] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.015761] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7e6ace12-7369-45f2-bdd1-f2b5664b842f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.023387] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 859.023387] env[69994]: value = "task-2925566" [ 859.023387] env[69994]: _type = "Task" [ 859.023387] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.032932] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925566, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.095097] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "565066c4-2f33-44c6-8e82-4c6d729cd0b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.095097] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "565066c4-2f33-44c6-8e82-4c6d729cd0b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.135757] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ac95efa9-cf11-4448-ad4e-2effd82f48f4 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "9269e42b-b05c-4c88-9008-aaeda4b0248f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.082s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.441332] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925564, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.534684] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925566, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.596325] env[69994]: DEBUG nova.compute.manager [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 859.613640] env[69994]: DEBUG oslo_concurrency.lockutils [None req-54d42782-01be-4ec0-babb-029d6e10d3f6 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.614828] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.145s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.615090] env[69994]: DEBUG nova.objects.instance [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lazy-loading 'resources' on Instance uuid f6408fad-a6b8-4868-a192-3acd065935ea {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 859.717557] env[69994]: DEBUG nova.compute.manager [req-1cb8ac01-4485-4fc4-8cf1-0d60373e7ad2 req-60d73b39-a587-45bb-ac7b-44ef5a20d157 service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Received event network-vif-plugged-381f1b5f-fbf6-499e-afb0-d63ec11e7e21 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 859.717817] env[69994]: DEBUG oslo_concurrency.lockutils [req-1cb8ac01-4485-4fc4-8cf1-0d60373e7ad2 req-60d73b39-a587-45bb-ac7b-44ef5a20d157 service nova] Acquiring lock "5acdf02b-f61c-46ff-9c36-8e86b9be7738-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.718082] env[69994]: DEBUG oslo_concurrency.lockutils [req-1cb8ac01-4485-4fc4-8cf1-0d60373e7ad2 req-60d73b39-a587-45bb-ac7b-44ef5a20d157 service nova] Lock "5acdf02b-f61c-46ff-9c36-8e86b9be7738-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.718257] env[69994]: DEBUG oslo_concurrency.lockutils [req-1cb8ac01-4485-4fc4-8cf1-0d60373e7ad2 req-60d73b39-a587-45bb-ac7b-44ef5a20d157 service nova] Lock "5acdf02b-f61c-46ff-9c36-8e86b9be7738-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.718445] env[69994]: DEBUG nova.compute.manager [req-1cb8ac01-4485-4fc4-8cf1-0d60373e7ad2 req-60d73b39-a587-45bb-ac7b-44ef5a20d157 service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] No waiting events found dispatching network-vif-plugged-381f1b5f-fbf6-499e-afb0-d63ec11e7e21 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 859.718614] env[69994]: WARNING nova.compute.manager [req-1cb8ac01-4485-4fc4-8cf1-0d60373e7ad2 req-60d73b39-a587-45bb-ac7b-44ef5a20d157 service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Received unexpected event network-vif-plugged-381f1b5f-fbf6-499e-afb0-d63ec11e7e21 for instance with vm_state building and task_state spawning. [ 859.812944] env[69994]: DEBUG nova.network.neutron [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Successfully updated port: 381f1b5f-fbf6-499e-afb0-d63ec11e7e21 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 859.937488] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925564, 'name': CloneVM_Task, 'duration_secs': 1.819585} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.937853] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Created linked-clone VM from snapshot [ 859.939035] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1157136d-f4aa-4e39-a92d-193faf082fa1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.946948] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Uploading image fcaca459-faaa-4345-b27b-5e17d562c798 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 859.983722] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 859.983722] env[69994]: value = "vm-587499" [ 859.983722] env[69994]: _type = "VirtualMachine" [ 859.983722] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 859.984972] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2473e387-cf78-4f21-8d3d-2602e3b7d95c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.991787] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Lease: (returnval){ [ 859.991787] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5222cf41-f67a-fcd6-3446-36eb0179eaba" [ 859.991787] env[69994]: _type = "HttpNfcLease" [ 859.991787] env[69994]: } obtained for exporting VM: (result){ [ 859.991787] env[69994]: value = "vm-587499" [ 859.991787] env[69994]: _type = "VirtualMachine" [ 859.991787] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 859.992270] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the lease: (returnval){ [ 859.992270] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5222cf41-f67a-fcd6-3446-36eb0179eaba" [ 859.992270] env[69994]: _type = "HttpNfcLease" [ 859.992270] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 860.002953] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 860.002953] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5222cf41-f67a-fcd6-3446-36eb0179eaba" [ 860.002953] env[69994]: _type = "HttpNfcLease" [ 860.002953] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 860.036086] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925566, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.130175] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.318670] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "refresh_cache-5acdf02b-f61c-46ff-9c36-8e86b9be7738" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.318670] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired lock "refresh_cache-5acdf02b-f61c-46ff-9c36-8e86b9be7738" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.318989] env[69994]: DEBUG nova.network.neutron [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 860.503661] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 860.503661] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5222cf41-f67a-fcd6-3446-36eb0179eaba" [ 860.503661] env[69994]: _type = "HttpNfcLease" [ 860.503661] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 860.504208] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 860.504208] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5222cf41-f67a-fcd6-3446-36eb0179eaba" [ 860.504208] env[69994]: _type = "HttpNfcLease" [ 860.504208] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 860.504772] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bbbec17-c5c7-478b-b336-4409ac1b8670 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.513020] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fba033-df1e-68c4-88b2-5de981a90494/disk-0.vmdk from lease info. 
{{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 860.514599] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fba033-df1e-68c4-88b2-5de981a90494/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 860.603203] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "b80a405e-a02e-4b18-a325-753146533d1b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.603203] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "b80a405e-a02e-4b18-a325-753146533d1b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.610556] env[69994]: DEBUG oslo_vmware.api [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925566, 'name': PowerOnVM_Task, 'duration_secs': 1.239742} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.611341] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.611341] env[69994]: INFO nova.compute.manager [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Took 10.07 seconds to spawn the instance on the hypervisor. 
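The records up to this point trace the oslo_vmware task lifecycle repeatedly: an operation such as Rename_Task, PowerOnVM_Task or CloneVM_Task is invoked, the client logs "Waiting for the task", a poller emits "progress is N%." lines, and the sequence ends with "completed successfully" plus a duration. The sketch below is a minimal stand-in for that polling pattern only, not the oslo.vmware implementation; the names poll_fn and TaskTimeout, the interval, and the fake task in the demo are all hypothetical.

```python
# Minimal sketch of the task-polling pattern visible in the log
# ("progress is N%." ... "completed successfully"). This is NOT the
# oslo.vmware code; poll_fn and TaskTimeout are hypothetical stand-ins.
import time


class TaskTimeout(Exception):
    """Raised when the task does not finish within the allowed time."""


def wait_for_task(poll_fn, interval=0.5, timeout=300):
    """Poll `poll_fn` until it reports completion.

    `poll_fn` must return a (state, progress) tuple, where state is one
    of 'running', 'success', or 'error'.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll_fn()
        print(f"Task progress is {progress}%.")    # mirrors the DEBUG records
        if state == "success":
            print("Task completed successfully.")  # mirrors the final record
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)                       # back off between polls
    raise TaskTimeout("task did not complete in time")


if __name__ == "__main__":
    # Fake task that finishes after three polls, for demonstration only.
    ticks = iter([("running", 0), ("running", 66), ("success", 100)])
    wait_for_task(lambda: next(ticks), interval=0.01)
```

The same wait-then-poll shape also explains the lease handling that follows: the HttpNfcLease records below go through "is initializing" and "is ready" states before the VMDK URL is read, which is the lease analogue of the task progress loop above.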
[ 860.611341] env[69994]: DEBUG nova.compute.manager [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 860.612081] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f215bb-1812-4076-a565-b3ead254dc6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.656552] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-078489dc-3921-4132-a08e-0ff31546238c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.739876] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970475a9-2f57-46eb-8612-21f068d2f98c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.748082] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5655c73-8945-4ef6-958f-9fb87e06e25d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.792710] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a94afc-ff9b-49d9-9436-559e7aacc538 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.800267] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20e2ec9-69dc-420b-874f-95f2d9e09ae8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.814871] env[69994]: DEBUG nova.compute.provider_tree [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 860.870947] env[69994]: DEBUG nova.network.neutron [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 861.055941] env[69994]: DEBUG nova.network.neutron [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Updating instance_info_cache with network_info: [{"id": "381f1b5f-fbf6-499e-afb0-d63ec11e7e21", "address": "fa:16:3e:34:b8:87", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap381f1b5f-fb", "ovs_interfaceid": "381f1b5f-fbf6-499e-afb0-d63ec11e7e21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.105905] env[69994]: DEBUG nova.compute.manager [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 861.144359] env[69994]: INFO nova.compute.manager [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Took 48.79 seconds to build instance. [ 861.351492] env[69994]: ERROR nova.scheduler.client.report [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] [req-4785b418-ca03-415d-a7cd-7481fcb7decf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4785b418-ca03-415d-a7cd-7481fcb7decf"}]} [ 861.380057] env[69994]: DEBUG nova.scheduler.client.report [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 861.407017] env[69994]: DEBUG nova.scheduler.client.report [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 861.407017] env[69994]: DEBUG nova.compute.provider_tree [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 861.429046] env[69994]: DEBUG nova.scheduler.client.report [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 861.455408] env[69994]: DEBUG nova.scheduler.client.report [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 861.564120] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Releasing lock "refresh_cache-5acdf02b-f61c-46ff-9c36-8e86b9be7738" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.564473] env[69994]: DEBUG nova.compute.manager [None 
req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Instance network_info: |[{"id": "381f1b5f-fbf6-499e-afb0-d63ec11e7e21", "address": "fa:16:3e:34:b8:87", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap381f1b5f-fb", "ovs_interfaceid": "381f1b5f-fbf6-499e-afb0-d63ec11e7e21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 861.565071] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:b8:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53915f38-d7a0-42ec-8b30-1eacfb2cc379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '381f1b5f-fbf6-499e-afb0-d63ec11e7e21', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 861.576108] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 861.578477] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 861.579238] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66ac5671-ccc1-461d-a239-85b22bfece22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.609241] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 861.609241] env[69994]: value = "task-2925568" [ 861.609241] env[69994]: _type = "Task" [ 861.609241] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.627458] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925568, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.638223] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.647295] env[69994]: DEBUG oslo_concurrency.lockutils [None req-38295e53-4507-48c1-b365-799d186eb41d tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "9ce0d8da-2366-469a-82cf-f2dcd4c7e44f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.161s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.777281] env[69994]: DEBUG nova.compute.manager [req-2cb7a26e-fefb-4b7e-b1c2-27fe607d9915 req-d7c3c473-b44c-4b53-9b9f-3ec4434597fe service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Received event network-changed-381f1b5f-fbf6-499e-afb0-d63ec11e7e21 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 861.777489] env[69994]: DEBUG nova.compute.manager [req-2cb7a26e-fefb-4b7e-b1c2-27fe607d9915 req-d7c3c473-b44c-4b53-9b9f-3ec4434597fe service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Refreshing instance network info cache due to event network-changed-381f1b5f-fbf6-499e-afb0-d63ec11e7e21. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 861.777843] env[69994]: DEBUG oslo_concurrency.lockutils [req-2cb7a26e-fefb-4b7e-b1c2-27fe607d9915 req-d7c3c473-b44c-4b53-9b9f-3ec4434597fe service nova] Acquiring lock "refresh_cache-5acdf02b-f61c-46ff-9c36-8e86b9be7738" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.777948] env[69994]: DEBUG oslo_concurrency.lockutils [req-2cb7a26e-fefb-4b7e-b1c2-27fe607d9915 req-d7c3c473-b44c-4b53-9b9f-3ec4434597fe service nova] Acquired lock "refresh_cache-5acdf02b-f61c-46ff-9c36-8e86b9be7738" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.778467] env[69994]: DEBUG nova.network.neutron [req-2cb7a26e-fefb-4b7e-b1c2-27fe607d9915 req-d7c3c473-b44c-4b53-9b9f-3ec4434597fe service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Refreshing network info cache for port 381f1b5f-fbf6-499e-afb0-d63ec11e7e21 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 861.990809] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01535c6e-c3bc-430a-bc16-fae43751fd88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.999334] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f069d466-6124-4fb0-97df-f6542376b416 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.035138] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce52de3-ceb3-460e-be58-92e1f679a273 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.043612] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7af05a6-4b8c-4a67-ad3d-3807a40351e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.059910] env[69994]: DEBUG nova.compute.provider_tree [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.120388] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925568, 'name': CreateVM_Task, 'duration_secs': 0.397847} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.120669] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 862.121376] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.121544] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.121875] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 862.122158] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbb3cd88-ff60-44fe-9bc6-84861522314c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.126978] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 862.126978] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5222c846-7e11-d483-1ef3-92a8d6ef2a76" [ 862.126978] env[69994]: _type = "Task" [ 862.126978] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.135530] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5222c846-7e11-d483-1ef3-92a8d6ef2a76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.561750] env[69994]: DEBUG nova.scheduler.client.report [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 862.639620] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5222c846-7e11-d483-1ef3-92a8d6ef2a76, 'name': SearchDatastore_Task, 'duration_secs': 0.014177} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.640047] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.640358] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 862.640669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.640843] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.641055] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 862.641414] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3e2a911-9e29-44b0-8ff6-a2eba6884a1b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.652340] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 862.652550] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 862.653410] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-938935d1-20e9-4dd5-b070-b64386e08d77 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.660358] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 862.660358] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5287a3e9-114e-967c-952e-8217d84b1150" [ 862.660358] env[69994]: _type = "Task" [ 862.660358] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.665217] env[69994]: DEBUG nova.network.neutron [req-2cb7a26e-fefb-4b7e-b1c2-27fe607d9915 req-d7c3c473-b44c-4b53-9b9f-3ec4434597fe service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Updated VIF entry in instance network info cache for port 381f1b5f-fbf6-499e-afb0-d63ec11e7e21. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 862.665562] env[69994]: DEBUG nova.network.neutron [req-2cb7a26e-fefb-4b7e-b1c2-27fe607d9915 req-d7c3c473-b44c-4b53-9b9f-3ec4434597fe service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Updating instance_info_cache with network_info: [{"id": "381f1b5f-fbf6-499e-afb0-d63ec11e7e21", "address": "fa:16:3e:34:b8:87", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap381f1b5f-fb", "ovs_interfaceid": "381f1b5f-fbf6-499e-afb0-d63ec11e7e21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.674533] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5287a3e9-114e-967c-952e-8217d84b1150, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.067753] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.453s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.070919] env[69994]: DEBUG oslo_concurrency.lockutils [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.198s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.072444] env[69994]: DEBUG nova.objects.instance [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Lazy-loading 'resources' on Instance uuid 75e952e7-6761-49a4-9193-175f5d30494e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.101341] env[69994]: INFO nova.scheduler.client.report [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Deleted allocations for instance f6408fad-a6b8-4868-a192-3acd065935ea [ 863.168670] env[69994]: DEBUG oslo_concurrency.lockutils [req-2cb7a26e-fefb-4b7e-b1c2-27fe607d9915 req-d7c3c473-b44c-4b53-9b9f-3ec4434597fe service nova] Releasing lock "refresh_cache-5acdf02b-f61c-46ff-9c36-8e86b9be7738" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.178350] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5287a3e9-114e-967c-952e-8217d84b1150, 'name': SearchDatastore_Task, 'duration_secs': 0.014544} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.179522] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efa156ee-a499-46ca-a20a-4d498ec30ceb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.187351] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 863.187351] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a692e0-f5fa-8c08-1ecc-457f66f9d1d8" [ 863.187351] env[69994]: _type = "Task" [ 863.187351] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.199054] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a692e0-f5fa-8c08-1ecc-457f66f9d1d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.219190] env[69994]: DEBUG oslo_vmware.rw_handles [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aa2b3e-d967-82c1-8080-027c12199fe8/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 863.219743] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7077bd-5319-4134-bad0-976d28801475 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.228218] env[69994]: DEBUG oslo_vmware.rw_handles [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aa2b3e-d967-82c1-8080-027c12199fe8/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 863.228218] env[69994]: ERROR oslo_vmware.rw_handles [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aa2b3e-d967-82c1-8080-027c12199fe8/disk-0.vmdk due to incomplete transfer. [ 863.228218] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b5d7692f-cf8d-4375-a4e5-d86e765a2214 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.238418] env[69994]: DEBUG oslo_vmware.rw_handles [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aa2b3e-d967-82c1-8080-027c12199fe8/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 863.238418] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Uploaded image 136e080b-9934-48bc-87a1-4505254582a3 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 863.240126] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 863.240530] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-87d4fcd2-3011-44b0-afbc-4f0552e6bd30 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.249735] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 863.249735] env[69994]: value = "task-2925569" [ 863.249735] env[69994]: _type = "Task" [ 863.249735] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.265120] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925569, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.612831] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b87498c8-4c88-4716-a855-e05815f7105c tempest-ServersTestMultiNic-1926047732 tempest-ServersTestMultiNic-1926047732-project-member] Lock "f6408fad-a6b8-4868-a192-3acd065935ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.479s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.701223] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a692e0-f5fa-8c08-1ecc-457f66f9d1d8, 'name': SearchDatastore_Task, 'duration_secs': 0.019262} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.701598] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.701869] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 5acdf02b-f61c-46ff-9c36-8e86b9be7738/5acdf02b-f61c-46ff-9c36-8e86b9be7738.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 863.702236] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-800855ae-81ef-411b-81ea-5a1976edd2e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.712985] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 863.712985] env[69994]: value = "task-2925570" [ 863.712985] env[69994]: _type = "Task" [ 863.712985] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.722727] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925570, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.764801] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925569, 'name': Destroy_Task} progress is 33%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.127167] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2aec6c0-fbd8-4bec-87a0-6c667b83918d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.139999] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b004ffaa-f9e4-47e8-bcb9-51b438161f90 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.180378] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0051818-616c-4dd5-a0b5-65077e21a7e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.188789] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ff26f4-1545-4aa6-88ba-d00ede88959a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.205424] env[69994]: DEBUG nova.compute.provider_tree [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.227600] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925570, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.264046] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925569, 'name': Destroy_Task, 'duration_secs': 0.739809} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.264360] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Destroyed the VM [ 864.264891] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 864.264891] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5543106d-5373-4723-b54b-16e837a86f49 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.276022] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 864.276022] env[69994]: value = "task-2925571" [ 864.276022] env[69994]: _type = "Task" [ 864.276022] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.294212] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925571, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.709780] env[69994]: DEBUG nova.scheduler.client.report [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 864.723239] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925570, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.78706} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.723486] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 5acdf02b-f61c-46ff-9c36-8e86b9be7738/5acdf02b-f61c-46ff-9c36-8e86b9be7738.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 864.723724] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 864.723956] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4bf73f6-ee22-4231-8765-75bc5de05cd2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.732898] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 864.732898] env[69994]: value = "task-2925572" [ 864.732898] env[69994]: _type = "Task" [ 864.732898] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.743549] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925572, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.787502] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925571, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.913263] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "309e5014-a43f-4346-9c11-036eb36c8c1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.913575] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "309e5014-a43f-4346-9c11-036eb36c8c1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.950707] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Acquiring lock "767ecd3d-631d-43b5-8ebf-28b6cb2077e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.950953] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Lock "767ecd3d-631d-43b5-8ebf-28b6cb2077e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.113179] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "9ce0d8da-2366-469a-82cf-f2dcd4c7e44f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.113506] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "9ce0d8da-2366-469a-82cf-f2dcd4c7e44f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.113753] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "9ce0d8da-2366-469a-82cf-f2dcd4c7e44f-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.113860] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "9ce0d8da-2366-469a-82cf-f2dcd4c7e44f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.114013] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "9ce0d8da-2366-469a-82cf-f2dcd4c7e44f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.116253] env[69994]: INFO nova.compute.manager [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Terminating instance [ 865.214868] env[69994]: DEBUG oslo_concurrency.lockutils [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.144s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.217495] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.237s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.217723] env[69994]: DEBUG nova.objects.instance [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 865.244066] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925572, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094637} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.245136] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.245691] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef11ff42-dfd6-4e52-8d63-f5d87c67d7fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.249453] env[69994]: INFO nova.scheduler.client.report [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Deleted allocations for instance 75e952e7-6761-49a4-9193-175f5d30494e [ 865.276134] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 5acdf02b-f61c-46ff-9c36-8e86b9be7738/5acdf02b-f61c-46ff-9c36-8e86b9be7738.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.277170] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8de2012e-4224-45c6-a0a3-493cebdba6d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.301555] env[69994]: DEBUG oslo_vmware.api [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925571, 'name': RemoveSnapshot_Task, 'duration_secs': 0.783599} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.302827] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 865.303124] env[69994]: INFO nova.compute.manager [None req-0504e863-53f4-4202-b8e9-a7493d1172b2 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Took 14.84 seconds to snapshot the instance on the hypervisor. [ 865.306035] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 865.306035] env[69994]: value = "task-2925573" [ 865.306035] env[69994]: _type = "Task" [ 865.306035] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.316296] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925573, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.420517] env[69994]: DEBUG nova.compute.manager [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 865.457750] env[69994]: DEBUG nova.compute.manager [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 865.621737] env[69994]: DEBUG nova.compute.manager [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 865.621737] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 865.622154] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80497402-fa47-4af6-9f6a-9adf0cfe370b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.632206] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 865.632400] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c917cf7e-f45a-4e07-8c43-b9f2d3d4e9dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.638896] env[69994]: DEBUG oslo_vmware.api [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 865.638896] env[69994]: value = "task-2925574" [ 865.638896] env[69994]: _type = "Task" [ 865.638896] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.648437] env[69994]: DEBUG oslo_vmware.api [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925574, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.784494] env[69994]: DEBUG oslo_concurrency.lockutils [None req-467e3c4e-42e5-4ee4-be82-6ed98953e311 tempest-ServersTestFqdnHostnames-1854434547 tempest-ServersTestFqdnHostnames-1854434547-project-member] Lock "75e952e7-6761-49a4-9193-175f5d30494e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.673s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.826191] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925573, 'name': ReconfigVM_Task, 'duration_secs': 0.434044} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.826481] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 5acdf02b-f61c-46ff-9c36-8e86b9be7738/5acdf02b-f61c-46ff-9c36-8e86b9be7738.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 865.827275] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-642a95f1-9d77-4ee8-b526-a9a2e6d3c864 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.834063] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 865.834063] env[69994]: value = "task-2925575" [ 865.834063] env[69994]: _type = "Task" [ 865.834063] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.846777] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925575, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.950781] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.981102] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.150506] env[69994]: DEBUG oslo_vmware.api [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925574, 'name': PowerOffVM_Task, 'duration_secs': 0.223325} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.151075] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 866.151363] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 866.151721] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4bb59c9e-4601-4adb-a259-45f2973379a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.218078] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 866.218078] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 866.218078] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Deleting the datastore file [datastore1] 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 866.218078] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-d4f3450d-4ca1-45ca-9993-682531400356 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.225023] env[69994]: DEBUG oslo_vmware.api [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 866.225023] env[69994]: value = "task-2925577" [ 866.225023] env[69994]: _type = "Task" [ 866.225023] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.228467] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc531b2a-8b38-4535-82b6-6e48f2fba293 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.230100] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.019s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.230860] env[69994]: DEBUG nova.objects.instance [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lazy-loading 'resources' on Instance uuid 203bc0d6-c149-4c3d-9ac7-962210d6b01d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 866.236784] env[69994]: DEBUG oslo_vmware.api [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925577, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.347379] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925575, 'name': Rename_Task, 'duration_secs': 0.180724} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.350261] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 866.350261] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cad75878-15c9-4960-94dc-6919419beb4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.355552] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 866.355552] env[69994]: value = "task-2925578" [ 866.355552] env[69994]: _type = "Task" [ 866.355552] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.734397] env[69994]: DEBUG oslo_vmware.api [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925577, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13231} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.734665] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 866.734950] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 866.735087] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 866.735259] env[69994]: INFO nova.compute.manager [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Took 1.11 seconds to destroy the instance on the hypervisor. [ 866.735504] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 866.735699] env[69994]: DEBUG nova.compute.manager [-] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 866.735787] env[69994]: DEBUG nova.network.neutron [-] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 866.869720] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925578, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.204283] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9825d532-0751-4056-979e-722568571dc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.212134] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1830cf5-753a-41ae-a6e9-1c6b1aedc0c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.248444] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0880fb66-4862-4bbf-be83-2f8509a3d7e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.258973] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d13bca-4933-4229-a421-b458c50ff9d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.274033] env[69994]: DEBUG nova.compute.provider_tree [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.367545] env[69994]: DEBUG oslo_vmware.api [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2925578, 'name': PowerOnVM_Task, 'duration_secs': 0.624031} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.367811] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 867.368082] env[69994]: INFO nova.compute.manager [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Took 8.91 seconds to spawn the instance on the hypervisor. 
[ 867.368268] env[69994]: DEBUG nova.compute.manager [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 867.369082] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f9ca45-71aa-46b4-9caf-fa2c3b162f91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.419134] env[69994]: DEBUG nova.compute.manager [req-c1764a2c-14d3-43e8-814c-c404a047884c req-f197c47f-9106-4f22-af5d-77105169c9f4 service nova] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Received event network-vif-deleted-429b3de3-f864-4315-a305-15e8cd0c31fb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 867.419134] env[69994]: INFO nova.compute.manager [req-c1764a2c-14d3-43e8-814c-c404a047884c req-f197c47f-9106-4f22-af5d-77105169c9f4 service nova] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Neutron deleted interface 429b3de3-f864-4315-a305-15e8cd0c31fb; detaching it from the instance and deleting it from the info cache [ 867.419281] env[69994]: DEBUG nova.network.neutron [req-c1764a2c-14d3-43e8-814c-c404a047884c req-f197c47f-9106-4f22-af5d-77105169c9f4 service nova] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.778343] env[69994]: DEBUG nova.scheduler.client.report [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 867.893750] env[69994]: INFO nova.compute.manager [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Took 49.41 seconds to build instance. 
[ 867.897942] env[69994]: DEBUG nova.network.neutron [-] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.923059] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a480481f-bbb7-4737-9679-760f60cab3f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.937870] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47afb348-e2a2-4132-b0e4-bdf233ec1ae5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.981800] env[69994]: DEBUG nova.compute.manager [req-c1764a2c-14d3-43e8-814c-c404a047884c req-f197c47f-9106-4f22-af5d-77105169c9f4 service nova] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Detach interface failed, port_id=429b3de3-f864-4315-a305-15e8cd0c31fb, reason: Instance 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 868.286700] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.054s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.295978] env[69994]: DEBUG oslo_concurrency.lockutils [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.707s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.295978] env[69994]: DEBUG nova.objects.instance [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lazy-loading 'resources' on Instance uuid 67f5ad56-9455-43fc-b940-8a67974703cc {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 868.319579] env[69994]: INFO nova.scheduler.client.report [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Deleted allocations for instance 203bc0d6-c149-4c3d-9ac7-962210d6b01d [ 868.397058] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1229ca88-1ad8-486b-9ba6-b12edbd38017 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "5acdf02b-f61c-46ff-9c36-8e86b9be7738" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.922s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.404387] env[69994]: INFO nova.compute.manager [-] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Took 1.67 seconds to deallocate network for instance. 
[ 868.795506] env[69994]: DEBUG nova.compute.manager [req-c3a46f98-ed71-4cf3-9c24-70118f7e7dcf req-8a91dbd6-59c1-44b2-ad69-49b8ab0ed94f service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Received event network-changed-381f1b5f-fbf6-499e-afb0-d63ec11e7e21 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 868.795506] env[69994]: DEBUG nova.compute.manager [req-c3a46f98-ed71-4cf3-9c24-70118f7e7dcf req-8a91dbd6-59c1-44b2-ad69-49b8ab0ed94f service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Refreshing instance network info cache due to event network-changed-381f1b5f-fbf6-499e-afb0-d63ec11e7e21. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 868.795506] env[69994]: DEBUG oslo_concurrency.lockutils [req-c3a46f98-ed71-4cf3-9c24-70118f7e7dcf req-8a91dbd6-59c1-44b2-ad69-49b8ab0ed94f service nova] Acquiring lock "refresh_cache-5acdf02b-f61c-46ff-9c36-8e86b9be7738" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.795506] env[69994]: DEBUG oslo_concurrency.lockutils [req-c3a46f98-ed71-4cf3-9c24-70118f7e7dcf req-8a91dbd6-59c1-44b2-ad69-49b8ab0ed94f service nova] Acquired lock "refresh_cache-5acdf02b-f61c-46ff-9c36-8e86b9be7738" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.796547] env[69994]: DEBUG nova.network.neutron [req-c3a46f98-ed71-4cf3-9c24-70118f7e7dcf req-8a91dbd6-59c1-44b2-ad69-49b8ab0ed94f service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Refreshing network info cache for port 381f1b5f-fbf6-499e-afb0-d63ec11e7e21 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 868.829304] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77e11dfb-fd1d-438d-bcb6-1d0c212910b7 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "203bc0d6-c149-4c3d-9ac7-962210d6b01d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.046s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.910698] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.095230] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fba033-df1e-68c4-88b2-5de981a90494/disk-0.vmdk. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 869.096197] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f40b6b-0848-4c66-b4d9-fe9cf73f1473 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.104878] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fba033-df1e-68c4-88b2-5de981a90494/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 869.105098] env[69994]: ERROR oslo_vmware.rw_handles [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fba033-df1e-68c4-88b2-5de981a90494/disk-0.vmdk due to incomplete transfer. [ 869.105348] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-be2abbd5-8947-46ff-81e7-8af316be66f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.112346] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fba033-df1e-68c4-88b2-5de981a90494/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 869.112588] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Uploaded image fcaca459-faaa-4345-b27b-5e17d562c798 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 869.114275] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 869.117153] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b7281218-dd34-4441-a959-073159d86e44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.124971] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 869.124971] env[69994]: value = "task-2925579" [ 869.124971] env[69994]: _type = "Task" [ 869.124971] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.140606] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925579, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.249931] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b61fbf-8f5f-4e66-96c6-a980e19ecbaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.258926] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea3e26d-b2a3-4f8b-9e80-b64fa1823bcd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.295822] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe87ae85-a8c0-432a-822f-b320ca8e571f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.306670] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294fb6c7-5d4d-41e6-8f4d-8519dee835ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.324077] env[69994]: DEBUG nova.compute.provider_tree [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.612559] env[69994]: DEBUG nova.network.neutron [req-c3a46f98-ed71-4cf3-9c24-70118f7e7dcf req-8a91dbd6-59c1-44b2-ad69-49b8ab0ed94f service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Updated VIF entry in instance network info cache for port 381f1b5f-fbf6-499e-afb0-d63ec11e7e21. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 869.613413] env[69994]: DEBUG nova.network.neutron [req-c3a46f98-ed71-4cf3-9c24-70118f7e7dcf req-8a91dbd6-59c1-44b2-ad69-49b8ab0ed94f service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Updating instance_info_cache with network_info: [{"id": "381f1b5f-fbf6-499e-afb0-d63ec11e7e21", "address": "fa:16:3e:34:b8:87", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap381f1b5f-fb", "ovs_interfaceid": "381f1b5f-fbf6-499e-afb0-d63ec11e7e21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.636126] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925579, 'name': Destroy_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.830569] env[69994]: DEBUG nova.scheduler.client.report [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 870.115484] env[69994]: DEBUG oslo_concurrency.lockutils [req-c3a46f98-ed71-4cf3-9c24-70118f7e7dcf req-8a91dbd6-59c1-44b2-ad69-49b8ab0ed94f service nova] Releasing lock "refresh_cache-5acdf02b-f61c-46ff-9c36-8e86b9be7738" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.136385] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925579, 'name': Destroy_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.334966] env[69994]: DEBUG oslo_concurrency.lockutils [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.044s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.338087] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.918s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.339921] env[69994]: INFO nova.compute.claims [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 870.451600] env[69994]: INFO nova.scheduler.client.report [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Deleted allocations for instance 67f5ad56-9455-43fc-b940-8a67974703cc [ 870.637050] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925579, 'name': Destroy_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.962025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-450bd9f8-4c94-4b90-bb69-1e021b992f18 tempest-MultipleCreateTestJSON-1978118658 tempest-MultipleCreateTestJSON-1978118658-project-member] Lock "67f5ad56-9455-43fc-b940-8a67974703cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.961s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.142902] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925579, 'name': Destroy_Task, 'duration_secs': 1.6182} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.143208] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Destroyed the VM [ 871.143442] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 871.143710] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-79d76550-36e4-4fff-aa08-402a230438e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.150363] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 871.150363] env[69994]: value = "task-2925580" [ 871.150363] env[69994]: _type = "Task" [ 871.150363] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.158947] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925580, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.669941] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925580, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.742800] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ca55dc-f735-4706-842a-6a05687e621c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.748812] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e077c3e-4911-4b53-a55b-c33ca640502d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.785308] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff8672a-2d32-4907-8e0f-2bde5f9cd5ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.793512] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84324049-b027-4d9f-8919-7699d8b62c5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.810633] env[69994]: DEBUG nova.compute.provider_tree [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.171979] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "f0b77732-aae1-4790-a2c7-75586e78eda6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.172302] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.173404] env[69994]: DEBUG oslo_vmware.api [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925580, 'name': RemoveSnapshot_Task, 'duration_secs': 0.954088} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.173843] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 872.174080] env[69994]: INFO nova.compute.manager [None req-b798a792-0642-486b-9044-b266b6d009b1 tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Took 16.42 seconds to snapshot the instance on the hypervisor. [ 872.314131] env[69994]: DEBUG nova.scheduler.client.report [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 872.674329] env[69994]: DEBUG nova.compute.manager [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 872.821580] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.822128] env[69994]: DEBUG nova.compute.manager [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 872.825964] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 30.019s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.826137] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.826301] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 872.826593] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.871s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.826800] env[69994]: DEBUG nova.objects.instance [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lazy-loading 'resources' on Instance uuid 956306bc-4701-4c04-8221-8ec0b9df73ca {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 872.828855] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de31ee66-cde3-431d-a9f7-62bfa39c4c04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.838853] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef521d32-64e6-4afd-97ab-d41f6beead64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.855716] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360fcec9-1fc4-4d78-8865-8b4fd1ff5de5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.863357] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53849a5-994a-4898-8f00-d64cc4d09dbf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.895456] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178226MB free_disk=157GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 872.895617] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.204445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.334239] env[69994]: DEBUG nova.compute.utils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 873.338683] env[69994]: DEBUG nova.compute.manager [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 873.338876] env[69994]: DEBUG nova.network.neutron [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 873.386664] env[69994]: DEBUG nova.policy [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4543702642614e079383389379629d8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0bbe936f4d284e73999846251269fefd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 873.744169] env[69994]: DEBUG nova.network.neutron [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Successfully created port: 4da807e4-2d99-47d8-a155-20d29693f532 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 873.823084] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace00ecf-098b-400f-9205-c9e5860a86a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.829607] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62082ef6-a0bd-4288-aca1-fd8f18ec05bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.863405] env[69994]: DEBUG nova.compute.manager [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 873.867712] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3205cfb9-1d4a-4eda-97ad-181901a12f11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.876769] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b25f31-8e20-44e9-a7ec-73b288d97b74 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.891521] env[69994]: DEBUG nova.compute.provider_tree [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 874.415950] env[69994]: ERROR nova.scheduler.client.report [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [req-9dcf16b2-39b3-4eb1-b6aa-890af4125202] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9dcf16b2-39b3-4eb1-b6aa-890af4125202"}]} [ 874.435535] env[69994]: DEBUG nova.scheduler.client.report [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 874.456510] env[69994]: DEBUG nova.scheduler.client.report [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 874.456749] env[69994]: DEBUG nova.compute.provider_tree [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 874.468619] env[69994]: DEBUG nova.scheduler.client.report [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 874.489527] env[69994]: DEBUG nova.scheduler.client.report [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 874.720478] env[69994]: DEBUG nova.compute.manager [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 874.722357] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4045a293-ec3b-4c7d-8724-9b67d15e332e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.876236] env[69994]: DEBUG nova.compute.manager [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 874.907979] env[69994]: DEBUG nova.virt.hardware [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 874.908429] env[69994]: DEBUG nova.virt.hardware [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 874.908598] env[69994]: DEBUG nova.virt.hardware [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 874.908780] env[69994]: DEBUG nova.virt.hardware [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 874.908926] env[69994]: DEBUG nova.virt.hardware [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 874.909088] env[69994]: DEBUG nova.virt.hardware [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 874.909494] env[69994]: DEBUG nova.virt.hardware [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 874.909693] env[69994]: DEBUG 
nova.virt.hardware [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 874.909981] env[69994]: DEBUG nova.virt.hardware [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 874.910761] env[69994]: DEBUG nova.virt.hardware [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 874.910761] env[69994]: DEBUG nova.virt.hardware [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 874.911519] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12950ff-4adb-448b-9580-868964be24a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.924482] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34435a06-5c30-4db7-a68a-0a30bcd79937 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.944756] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32802362-a22e-443f-8611-2b3ed0f39947 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.952834] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b10f96a-a5a2-4805-be94-46dc0ef194f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.984461] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285d1c65-29e9-4a66-933e-d21dc4b6f127 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.997721] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad16dd4-b911-4cd5-a31c-a3d0c38c75ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.012698] env[69994]: DEBUG nova.compute.provider_tree [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 875.175389] env[69994]: DEBUG nova.compute.manager [req-75ac803c-bb19-4a22-8e9c-4c3f86ff1e93 req-9a4eb7b5-c461-4371-8e9f-716a541e5a96 service nova] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Received event network-vif-plugged-4da807e4-2d99-47d8-a155-20d29693f532 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 875.175612] env[69994]: DEBUG oslo_concurrency.lockutils [req-75ac803c-bb19-4a22-8e9c-4c3f86ff1e93 req-9a4eb7b5-c461-4371-8e9f-716a541e5a96 service nova] Acquiring lock "06fa5ab5-baab-466e-8574-5391247c13a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.176208] env[69994]: DEBUG oslo_concurrency.lockutils [req-75ac803c-bb19-4a22-8e9c-4c3f86ff1e93 req-9a4eb7b5-c461-4371-8e9f-716a541e5a96 service nova] Lock "06fa5ab5-baab-466e-8574-5391247c13a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.176443] env[69994]: DEBUG oslo_concurrency.lockutils [req-75ac803c-bb19-4a22-8e9c-4c3f86ff1e93 req-9a4eb7b5-c461-4371-8e9f-716a541e5a96 service nova] Lock "06fa5ab5-baab-466e-8574-5391247c13a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.176701] env[69994]: DEBUG nova.compute.manager [req-75ac803c-bb19-4a22-8e9c-4c3f86ff1e93 req-9a4eb7b5-c461-4371-8e9f-716a541e5a96 service nova] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] No waiting events found dispatching network-vif-plugged-4da807e4-2d99-47d8-a155-20d29693f532 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 875.177025] env[69994]: WARNING nova.compute.manager [req-75ac803c-bb19-4a22-8e9c-4c3f86ff1e93 req-9a4eb7b5-c461-4371-8e9f-716a541e5a96 service nova] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Received unexpected event network-vif-plugged-4da807e4-2d99-47d8-a155-20d29693f532 for instance with vm_state building and task_state spawning. 
[ 875.240033] env[69994]: INFO nova.compute.manager [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] instance snapshotting [ 875.244451] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2efddc26-9cb9-4466-aa77-b59f0ab3b6b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.266065] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a85b3d-ffad-413d-894a-f0e15a88e9be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.281954] env[69994]: DEBUG nova.network.neutron [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Successfully updated port: 4da807e4-2d99-47d8-a155-20d29693f532 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 875.557258] env[69994]: DEBUG nova.scheduler.client.report [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 81 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 875.557706] env[69994]: DEBUG nova.compute.provider_tree [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 81 to 82 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 875.557991] env[69994]: DEBUG nova.compute.provider_tree [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 875.778260] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 875.778644] env[69994]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ace8369f-b0f4-4ebd-9f36-af11b85b62ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.784554] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "refresh_cache-06fa5ab5-baab-466e-8574-5391247c13a8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.784980] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "refresh_cache-06fa5ab5-baab-466e-8574-5391247c13a8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.784980] env[69994]: DEBUG nova.network.neutron [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.788598] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 875.788598] env[69994]: value = "task-2925581" [ 875.788598] env[69994]: _type = "Task" [ 875.788598] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.797862] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925581, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.067341] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.240s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.071966] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.597s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.073334] env[69994]: INFO nova.compute.claims [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 876.094445] env[69994]: INFO nova.scheduler.client.report [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleted allocations for instance 956306bc-4701-4c04-8221-8ec0b9df73ca [ 876.304832] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925581, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.339101] env[69994]: DEBUG nova.network.neutron [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 876.353432] env[69994]: INFO nova.compute.manager [None req-a9e09020-0080-4928-b082-baf1323b4225 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Get console output [ 876.353677] env[69994]: WARNING nova.virt.vmwareapi.driver [None req-a9e09020-0080-4928-b082-baf1323b4225 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] The console log is missing. 
Check your VSPC configuration [ 876.493095] env[69994]: DEBUG nova.network.neutron [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Updating instance_info_cache with network_info: [{"id": "4da807e4-2d99-47d8-a155-20d29693f532", "address": "fa:16:3e:d8:30:5c", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4da807e4-2d", "ovs_interfaceid": "4da807e4-2d99-47d8-a155-20d29693f532", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.603519] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841f96d2-eb55-423c-95c4-90626918f3a5 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "956306bc-4701-4c04-8221-8ec0b9df73ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.249s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.753176] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquiring lock "43119e21-5226-482c-b640-33e73051a563" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.753439] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Lock "43119e21-5226-482c-b640-33e73051a563" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.808136] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925581, 'name': CreateSnapshot_Task, 'duration_secs': 0.601678} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.809369] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 876.809369] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382c8560-ecd5-4973-a961-41f1d3d042e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.996874] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "refresh_cache-06fa5ab5-baab-466e-8574-5391247c13a8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.997445] env[69994]: DEBUG nova.compute.manager [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Instance network_info: |[{"id": "4da807e4-2d99-47d8-a155-20d29693f532", "address": "fa:16:3e:d8:30:5c", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4da807e4-2d", "ovs_interfaceid": "4da807e4-2d99-47d8-a155-20d29693f532", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 876.998203] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:30:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4da807e4-2d99-47d8-a155-20d29693f532', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 877.007667] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Creating folder: Project (0bbe936f4d284e73999846251269fefd). 
Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 877.008323] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9dc942d4-d6e2-4982-ac46-09f380b44645 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.018315] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Created folder: Project (0bbe936f4d284e73999846251269fefd) in parent group-v587342. [ 877.018518] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Creating folder: Instances. Parent ref: group-v587502. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 877.018808] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c57aa9c7-146f-4a0b-afd9-f1506d452a4a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.026641] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Created folder: Instances in parent group-v587502. [ 877.026877] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 877.027078] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 877.027313] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e328d29e-17ec-4456-baed-e642e1859101 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.049023] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 877.049023] env[69994]: value = "task-2925584" [ 877.049023] env[69994]: _type = "Task" [ 877.049023] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.056804] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925584, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.237073] env[69994]: DEBUG nova.compute.manager [req-d8686f84-9802-44d5-ab16-5d98049c520a req-2dc4a395-b823-41dd-897a-4dba6989dc01 service nova] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Received event network-changed-4da807e4-2d99-47d8-a155-20d29693f532 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 877.237073] env[69994]: DEBUG nova.compute.manager [req-d8686f84-9802-44d5-ab16-5d98049c520a req-2dc4a395-b823-41dd-897a-4dba6989dc01 service nova] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Refreshing instance network info cache due to event network-changed-4da807e4-2d99-47d8-a155-20d29693f532. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 877.238043] env[69994]: DEBUG oslo_concurrency.lockutils [req-d8686f84-9802-44d5-ab16-5d98049c520a req-2dc4a395-b823-41dd-897a-4dba6989dc01 service nova] Acquiring lock "refresh_cache-06fa5ab5-baab-466e-8574-5391247c13a8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.238043] env[69994]: DEBUG oslo_concurrency.lockutils [req-d8686f84-9802-44d5-ab16-5d98049c520a req-2dc4a395-b823-41dd-897a-4dba6989dc01 service nova] Acquired lock "refresh_cache-06fa5ab5-baab-466e-8574-5391247c13a8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.238043] env[69994]: DEBUG nova.network.neutron [req-d8686f84-9802-44d5-ab16-5d98049c520a req-2dc4a395-b823-41dd-897a-4dba6989dc01 service nova] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Refreshing network info cache for port 4da807e4-2d99-47d8-a155-20d29693f532 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 877.256241] env[69994]: DEBUG nova.compute.manager [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 877.326528] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 877.326746] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ec3f883f-e797-43a4-a1e4-7d1ab3426ea8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.339130] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 877.339130] env[69994]: value = "task-2925585" [ 877.339130] env[69994]: _type = "Task" [ 877.339130] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.348519] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925585, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.436862] env[69994]: DEBUG oslo_concurrency.lockutils [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Acquiring lock "9e9973e1-feb8-4fd7-95ae-e6d824af5a64" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.437243] env[69994]: DEBUG oslo_concurrency.lockutils [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Lock "9e9973e1-feb8-4fd7-95ae-e6d824af5a64" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.437498] env[69994]: DEBUG oslo_concurrency.lockutils [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Acquiring lock "9e9973e1-feb8-4fd7-95ae-e6d824af5a64-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.437707] env[69994]: DEBUG oslo_concurrency.lockutils [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Lock "9e9973e1-feb8-4fd7-95ae-e6d824af5a64-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.437892] env[69994]: DEBUG oslo_concurrency.lockutils [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Lock "9e9973e1-feb8-4fd7-95ae-e6d824af5a64-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.444734] env[69994]: INFO nova.compute.manager [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Terminating instance [ 877.545800] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8037aaee-189e-4340-9b94-af951259e18c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.559097] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5082f79-6ed1-4c8e-8ece-822aa6445038 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.562361] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925584, 'name': CreateVM_Task, 'duration_secs': 0.364083} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.562540] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 877.563577] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.563750] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.564082] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 877.564356] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4016383-4c36-41ca-bb08-6b082a4962e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.590343] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "2358d8f6-7fbc-4f30-93ad-27f4d96aefa7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.590609] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "2358d8f6-7fbc-4f30-93ad-27f4d96aefa7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.590818] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "2358d8f6-7fbc-4f30-93ad-27f4d96aefa7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.591008] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "2358d8f6-7fbc-4f30-93ad-27f4d96aefa7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.591190] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "2358d8f6-7fbc-4f30-93ad-27f4d96aefa7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.593955] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9558de65-f228-40e1-9283-69c343635e00 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.597940] env[69994]: INFO nova.compute.manager [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Terminating instance [ 877.599753] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 877.599753] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524b8404-c325-35c0-a441-166a1bfa95ea" [ 877.599753] env[69994]: _type = "Task" [ 877.599753] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.608682] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87d13ce-f599-4a0c-b317-d9ae0a51dc91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.616189] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524b8404-c325-35c0-a441-166a1bfa95ea, 'name': SearchDatastore_Task, 'duration_secs': 0.009411} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.616821] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.617093] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 877.617362] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.617533] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.617732] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 877.618111] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-771b85e0-53a7-42db-9902-6d720637e708 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.627687] env[69994]: DEBUG nova.compute.provider_tree [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.634978] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 877.635772] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 877.636451] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e88c603d-59f8-40b5-b7a3-ffa583312d6c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.642075] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 877.642075] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520a0ca0-8a03-e713-5039-0c64041b2fb9" [ 877.642075] env[69994]: _type = "Task" [ 877.642075] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.650318] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520a0ca0-8a03-e713-5039-0c64041b2fb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.781064] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.849464] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925585, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.935946] env[69994]: DEBUG nova.network.neutron [req-d8686f84-9802-44d5-ab16-5d98049c520a req-2dc4a395-b823-41dd-897a-4dba6989dc01 service nova] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Updated VIF entry in instance network info cache for port 4da807e4-2d99-47d8-a155-20d29693f532. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 877.936357] env[69994]: DEBUG nova.network.neutron [req-d8686f84-9802-44d5-ab16-5d98049c520a req-2dc4a395-b823-41dd-897a-4dba6989dc01 service nova] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Updating instance_info_cache with network_info: [{"id": "4da807e4-2d99-47d8-a155-20d29693f532", "address": "fa:16:3e:d8:30:5c", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4da807e4-2d", "ovs_interfaceid": "4da807e4-2d99-47d8-a155-20d29693f532", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.948762] env[69994]: DEBUG nova.compute.manager [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 877.948969] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 877.950088] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a47ae8-cdc2-4dc7-9c53-fff991fff6b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.959549] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 877.959791] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37f6d4c5-3bba-44be-b127-60a35b850793 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.965967] env[69994]: DEBUG oslo_vmware.api [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Waiting for the task: (returnval){ [ 877.965967] env[69994]: value = "task-2925586" [ 877.965967] env[69994]: _type = "Task" [ 877.965967] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.974132] env[69994]: DEBUG oslo_vmware.api [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925586, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.102865] env[69994]: DEBUG nova.compute.manager [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 878.103129] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 878.104080] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5ee855-9acf-4f7d-9b48-471ac7a481d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.111701] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.112110] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8aab65d-b38b-4612-a444-f9066cc24fe1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.118340] env[69994]: DEBUG oslo_vmware.api [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 878.118340] env[69994]: value = "task-2925587" [ 878.118340] env[69994]: _type = "Task" [ 878.118340] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.126549] env[69994]: DEBUG oslo_vmware.api [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925587, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.130576] env[69994]: DEBUG nova.scheduler.client.report [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 878.153391] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520a0ca0-8a03-e713-5039-0c64041b2fb9, 'name': SearchDatastore_Task, 'duration_secs': 0.009165} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.154272] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d226634c-c824-44d2-98b8-8371a9e97858 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.160699] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 878.160699] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52387ec9-c962-7013-e66e-84f0f433e23c" [ 878.160699] env[69994]: _type = "Task" [ 878.160699] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.168742] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52387ec9-c962-7013-e66e-84f0f433e23c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.349834] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925585, 'name': CloneVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.439514] env[69994]: DEBUG oslo_concurrency.lockutils [req-d8686f84-9802-44d5-ab16-5d98049c520a req-2dc4a395-b823-41dd-897a-4dba6989dc01 service nova] Releasing lock "refresh_cache-06fa5ab5-baab-466e-8574-5391247c13a8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.475936] env[69994]: DEBUG oslo_vmware.api [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925586, 'name': PowerOffVM_Task, 'duration_secs': 0.174349} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.476665] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 878.476665] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 878.476665] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81d811cb-5a7d-4f23-8765-d98ce14b1573 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.534712] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 878.534952] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 878.535153] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Deleting the datastore file [datastore1] 9e9973e1-feb8-4fd7-95ae-e6d824af5a64 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 878.535435] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59b63032-4b25-483b-aaf4-91800e653571 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.541269] env[69994]: DEBUG oslo_vmware.api [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Waiting for the task: (returnval){ [ 878.541269] env[69994]: value = "task-2925589" [ 878.541269] env[69994]: _type = "Task" [ 878.541269] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.549146] env[69994]: DEBUG oslo_vmware.api [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925589, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.630025] env[69994]: DEBUG oslo_vmware.api [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925587, 'name': PowerOffVM_Task, 'duration_secs': 0.191711} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.630415] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 878.630625] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 878.630879] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-534af5c4-7f94-4ca9-a82b-f7bd415e71c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.635607] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.636197] env[69994]: DEBUG nova.compute.manager [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 878.639019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.729s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.639248] env[69994]: DEBUG nova.objects.instance [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lazy-loading 'resources' on Instance uuid bb062ddc-5281-4957-bb9d-8f5c0b0ba526 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 878.671161] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52387ec9-c962-7013-e66e-84f0f433e23c, 'name': SearchDatastore_Task, 'duration_secs': 0.011379} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.671433] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.671730] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 06fa5ab5-baab-466e-8574-5391247c13a8/06fa5ab5-baab-466e-8574-5391247c13a8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 878.671986] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bd71599-07f0-4250-bfd9-55e7300af5e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.677909] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 878.677909] env[69994]: value = "task-2925591" [ 878.677909] env[69994]: _type = "Task" [ 878.677909] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.685405] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925591, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.708624] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 878.708844] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 878.708974] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleting the datastore file [datastore2] 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 878.709252] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-161cc8da-5c66-4f45-a137-c2cc32986f54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.717912] env[69994]: DEBUG oslo_vmware.api [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 878.717912] env[69994]: value = "task-2925592" [ 878.717912] env[69994]: _type = "Task" [ 878.717912] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.724760] env[69994]: DEBUG oslo_vmware.api [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925592, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.852747] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925585, 'name': CloneVM_Task, 'duration_secs': 1.264073} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.853795] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Created linked-clone VM from snapshot [ 878.853873] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ff3277-ba1a-40a7-a064-3967fcb23b1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.862827] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Uploading image 9630d505-0b21-4142-8042-3205fbfedde4 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 878.885462] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 878.885462] env[69994]: value = "vm-587505" [ 878.885462] env[69994]: _type = "VirtualMachine" [ 878.885462] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 878.885773] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-44d8fb92-f7b2-43e3-9149-43dd26f71d46 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.893084] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Lease: (returnval){ [ 878.893084] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5240832d-3011-b613-c9fc-275f27c5d673" [ 878.893084] env[69994]: _type = "HttpNfcLease" [ 878.893084] env[69994]: } obtained for exporting VM: (result){ [ 878.893084] env[69994]: value = "vm-587505" [ 878.893084] env[69994]: _type = "VirtualMachine" [ 878.893084] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 878.893332] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the lease: (returnval){ [ 878.893332] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5240832d-3011-b613-c9fc-275f27c5d673" [ 878.893332] env[69994]: _type = "HttpNfcLease" [ 878.893332] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 878.900400] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 878.900400] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5240832d-3011-b613-c9fc-275f27c5d673" [ 878.900400] env[69994]: _type = "HttpNfcLease" [ 878.900400] env[69994]: } is initializing. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 879.053548] env[69994]: DEBUG oslo_vmware.api [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Task: {'id': task-2925589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126303} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.053856] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 879.054185] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 879.054409] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 879.054633] env[69994]: INFO nova.compute.manager [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Took 1.11 seconds to destroy the instance on the hypervisor. [ 879.054889] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 879.055236] env[69994]: DEBUG nova.compute.manager [-] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 879.055370] env[69994]: DEBUG nova.network.neutron [-] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 879.143859] env[69994]: DEBUG nova.compute.utils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 879.148548] env[69994]: DEBUG nova.compute.manager [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 879.148548] env[69994]: DEBUG nova.network.neutron [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 879.185338] env[69994]: DEBUG nova.policy [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9c7ff94bd744305a13df72dbf967c11', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66d57a69e0924b9abc2cc4e67fc8173c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 879.193540] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925591, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481544} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.193540] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 06fa5ab5-baab-466e-8574-5391247c13a8/06fa5ab5-baab-466e-8574-5391247c13a8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 879.193540] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 879.193540] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d41f7adf-0c8c-4d0b-9bde-6a9e068cfa63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.203470] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 879.203470] env[69994]: value = "task-2925594" [ 879.203470] env[69994]: _type = "Task" [ 879.203470] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.214319] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925594, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.228847] env[69994]: DEBUG oslo_vmware.api [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.321189} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.229513] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 879.229513] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 879.229629] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 879.229872] env[69994]: INFO nova.compute.manager [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 879.230183] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 879.230416] env[69994]: DEBUG nova.compute.manager [-] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 879.230563] env[69994]: DEBUG nova.network.neutron [-] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 879.401490] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 879.401490] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5240832d-3011-b613-c9fc-275f27c5d673" [ 879.401490] env[69994]: _type = "HttpNfcLease" [ 879.401490] env[69994]: } is ready. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 879.401710] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 879.401710] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5240832d-3011-b613-c9fc-275f27c5d673" [ 879.401710] env[69994]: _type = "HttpNfcLease" [ 879.401710] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 879.404901] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef9ec4c-9b03-464d-bc98-ca6d4943d739 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.412862] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5278cf1c-e53a-2378-373f-e1b0ce4a9d3e/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 879.415569] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5278cf1c-e53a-2378-373f-e1b0ce4a9d3e/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 879.526427] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f1f73778-c485-40d4-a8fc-697e213fa3dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.650679] env[69994]: DEBUG nova.compute.manager [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 879.671091] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4181b9d-5fcc-4e26-87ca-86570e0ba1e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.675127] env[69994]: DEBUG nova.network.neutron [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Successfully created port: cf663439-5f58-4ebe-9323-5937dcc425a7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 879.683160] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49041e1f-5cc3-4f10-a04d-14143d663011 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.716865] env[69994]: DEBUG nova.network.neutron [-] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.721521] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbd2edb-b0a3-472d-87c9-e6613e441e13 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.733165] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641b1a7b-6a64-477a-ae41-26be750aa7b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.738189] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925594, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07159} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.738439] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 879.739628] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bba24f4-0551-4a24-8944-f998cf13d2db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.750455] env[69994]: DEBUG nova.compute.provider_tree [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.772530] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 06fa5ab5-baab-466e-8574-5391247c13a8/06fa5ab5-baab-466e-8574-5391247c13a8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 879.775237] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a13afb5f-b83b-4e36-ab04-f640032f434f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.800759] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 879.800759] env[69994]: value = "task-2925595" [ 879.800759] env[69994]: _type = "Task" [ 879.800759] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.807517] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925595, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.811904] env[69994]: DEBUG nova.compute.manager [req-ddb0896d-db8e-4391-8256-fff34b8b4453 req-1a7f0d83-0b84-44be-9eae-6f733ba9bc0f service nova] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Received event network-vif-deleted-7672d351-d6eb-466c-87d1-f7f798da34d4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 880.225714] env[69994]: INFO nova.compute.manager [-] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Took 1.00 seconds to deallocate network for instance. 
[ 880.255123] env[69994]: DEBUG nova.scheduler.client.report [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 880.292548] env[69994]: DEBUG nova.network.neutron [-] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.309308] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925595, 'name': ReconfigVM_Task, 'duration_secs': 0.340243} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.309694] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 06fa5ab5-baab-466e-8574-5391247c13a8/06fa5ab5-baab-466e-8574-5391247c13a8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.310386] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f22a0aa-fe78-413a-ac14-bf12406dfbef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.316582] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 880.316582] env[69994]: value = "task-2925596" [ 880.316582] env[69994]: _type = "Task" [ 880.316582] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.326332] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925596, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.659393] env[69994]: DEBUG nova.compute.manager [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 880.689499] env[69994]: DEBUG nova.virt.hardware [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 880.690304] env[69994]: DEBUG nova.virt.hardware [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.690304] env[69994]: DEBUG nova.virt.hardware [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 880.690446] env[69994]: DEBUG nova.virt.hardware [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.691025] env[69994]: DEBUG nova.virt.hardware [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 880.691025] env[69994]: DEBUG nova.virt.hardware [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 880.691025] env[69994]: DEBUG nova.virt.hardware [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 880.691152] env[69994]: DEBUG nova.virt.hardware [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 880.691377] 
env[69994]: DEBUG nova.virt.hardware [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 880.691603] env[69994]: DEBUG nova.virt.hardware [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 880.691835] env[69994]: DEBUG nova.virt.hardware [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 880.692769] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04f6b1e-290a-4885-af5d-0a10a597e132 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.701523] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf55e2c0-d9b2-4a10-bfb0-67593d643650 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.732135] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.761287] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.122s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.763999] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.497s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.764626] env[69994]: DEBUG nova.objects.instance [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lazy-loading 'resources' on Instance uuid a589ddb9-947b-4ff4-94f6-1fab4bdb874b {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 880.784414] env[69994]: INFO nova.scheduler.client.report [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Deleted allocations for instance bb062ddc-5281-4957-bb9d-8f5c0b0ba526 [ 880.794949] env[69994]: INFO nova.compute.manager [-] 
[instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Took 1.74 seconds to deallocate network for instance. [ 880.829160] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925596, 'name': Rename_Task, 'duration_secs': 0.142662} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.829607] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 880.829968] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01ac4cf3-4de5-42c6-b9a6-d93cc04cfe5c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.839472] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 880.839472] env[69994]: value = "task-2925597" [ 880.839472] env[69994]: _type = "Task" [ 880.839472] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.848064] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925597, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.294747] env[69994]: DEBUG nova.compute.manager [req-3ace31a3-b239-4ae1-88d4-c1e3d9b61e1c req-519eda6c-c2e0-4a14-be8d-135f792e918a service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Received event network-vif-plugged-cf663439-5f58-4ebe-9323-5937dcc425a7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 881.295157] env[69994]: DEBUG oslo_concurrency.lockutils [req-3ace31a3-b239-4ae1-88d4-c1e3d9b61e1c req-519eda6c-c2e0-4a14-be8d-135f792e918a service nova] Acquiring lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.295417] env[69994]: DEBUG oslo_concurrency.lockutils [req-3ace31a3-b239-4ae1-88d4-c1e3d9b61e1c req-519eda6c-c2e0-4a14-be8d-135f792e918a service nova] Lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.295677] env[69994]: DEBUG oslo_concurrency.lockutils [req-3ace31a3-b239-4ae1-88d4-c1e3d9b61e1c req-519eda6c-c2e0-4a14-be8d-135f792e918a service nova] Lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.295788] env[69994]: DEBUG nova.compute.manager [req-3ace31a3-b239-4ae1-88d4-c1e3d9b61e1c req-519eda6c-c2e0-4a14-be8d-135f792e918a service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] No waiting events found dispatching network-vif-plugged-cf663439-5f58-4ebe-9323-5937dcc425a7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 881.295967] env[69994]: WARNING nova.compute.manager [req-3ace31a3-b239-4ae1-88d4-c1e3d9b61e1c req-519eda6c-c2e0-4a14-be8d-135f792e918a service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Received unexpected event network-vif-plugged-cf663439-5f58-4ebe-9323-5937dcc425a7 for instance with vm_state building and task_state spawning. 
[ 881.300064] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4d48e0b7-357d-4092-bcbf-c1d800799c21 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "bb062ddc-5281-4957-bb9d-8f5c0b0ba526" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.919s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.302017] env[69994]: DEBUG oslo_concurrency.lockutils [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.352048] env[69994]: DEBUG oslo_vmware.api [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925597, 'name': PowerOnVM_Task, 'duration_secs': 0.454035} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.352134] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 881.352387] env[69994]: INFO nova.compute.manager [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Took 6.48 seconds to spawn the instance on the hypervisor. 
[ 881.352637] env[69994]: DEBUG nova.compute.manager [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 881.353471] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f060d63-5ff7-4f03-a274-8a0d38dbb763 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.370737] env[69994]: DEBUG nova.network.neutron [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Successfully updated port: cf663439-5f58-4ebe-9323-5937dcc425a7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 881.691254] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4421c10b-cebf-45b9-aff8-367e1f600b8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.699731] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd706188-6dc3-4ffc-9310-905fff982683 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.733522] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da37ec5e-dc34-400a-b2d0-70c0809b7c04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.746291] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a261d8b8-b018-4467-bb2c-eedd7d4f39e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.759927] env[69994]: DEBUG nova.compute.provider_tree [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.872589] env[69994]: INFO nova.compute.manager [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Took 40.47 seconds to build instance. 
[ 881.873717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.873790] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.873917] env[69994]: DEBUG nova.network.neutron [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.961225] env[69994]: DEBUG nova.compute.manager [req-d1f72604-3c9e-441e-a945-97e438398816 req-63968722-3244-4de7-9a3a-ce4dcb024c6f service nova] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Received event network-vif-deleted-68b29b35-015e-4545-af50-70655d1914db {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 882.264126] env[69994]: DEBUG nova.scheduler.client.report [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 882.312997] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "9b6aca3c-337b-4067-80e0-487d956fabc7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.313266] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "9b6aca3c-337b-4067-80e0-487d956fabc7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.315874] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "9b6aca3c-337b-4067-80e0-487d956fabc7-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.315874] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "9b6aca3c-337b-4067-80e0-487d956fabc7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.315874] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "9b6aca3c-337b-4067-80e0-487d956fabc7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.318108] env[69994]: INFO nova.compute.manager [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Terminating instance [ 882.375571] env[69994]: DEBUG oslo_concurrency.lockutils [None req-267b3368-97b5-4e42-a36f-b1a38d03f766 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "06fa5ab5-baab-466e-8574-5391247c13a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.985s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.417381] env[69994]: DEBUG nova.network.neutron [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.584542] env[69994]: DEBUG nova.network.neutron [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updating instance_info_cache with network_info: [{"id": "cf663439-5f58-4ebe-9323-5937dcc425a7", "address": "fa:16:3e:0e:91:27", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf663439-5f", "ovs_interfaceid": "cf663439-5f58-4ebe-9323-5937dcc425a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.770355] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.006s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.773308] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 24.377s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.796156] env[69994]: INFO nova.scheduler.client.report [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Deleted allocations for instance a589ddb9-947b-4ff4-94f6-1fab4bdb874b [ 882.822214] env[69994]: DEBUG nova.compute.manager [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 882.822480] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 882.823575] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04469745-f78f-4775-b996-9b4dcec3a7d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.833396] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 882.833683] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2fe58db-dc6d-4ecc-a82f-7b542a405f6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.840355] env[69994]: DEBUG oslo_vmware.api [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 882.840355] env[69994]: value = "task-2925598" [ 882.840355] env[69994]: _type = "Task" [ 882.840355] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.849894] env[69994]: DEBUG oslo_vmware.api [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925598, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.088270] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.089293] env[69994]: DEBUG nova.compute.manager [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Instance network_info: |[{"id": "cf663439-5f58-4ebe-9323-5937dcc425a7", "address": "fa:16:3e:0e:91:27", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf663439-5f", "ovs_interfaceid": "cf663439-5f58-4ebe-9323-5937dcc425a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 883.089293] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:91:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf663439-5f58-4ebe-9323-5937dcc425a7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.096784] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 883.097146] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 883.097296] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b1b0afe-ace9-4d36-9729-719413567076 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.117516] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.117516] env[69994]: value = "task-2925599" [ 883.117516] env[69994]: _type = "Task" [ 883.117516] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.125370] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925599, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.276517] env[69994]: DEBUG nova.objects.instance [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lazy-loading 'migration_context' on Instance uuid f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.304892] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67c85867-91a3-4bdf-abec-d5cbfdf805f0 tempest-ListServerFiltersTestJSON-2079587369 tempest-ListServerFiltersTestJSON-2079587369-project-member] Lock "a589ddb9-947b-4ff4-94f6-1fab4bdb874b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.696s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.324702] env[69994]: DEBUG nova.compute.manager [req-8471f2c8-45dc-4085-a47e-5d925f348755 req-c85c458d-cd10-4d89-8c8d-b3548e04dffd service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Received event network-changed-cf663439-5f58-4ebe-9323-5937dcc425a7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 883.324702] env[69994]: DEBUG nova.compute.manager [req-8471f2c8-45dc-4085-a47e-5d925f348755 req-c85c458d-cd10-4d89-8c8d-b3548e04dffd service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Refreshing instance network info cache due to event network-changed-cf663439-5f58-4ebe-9323-5937dcc425a7. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 883.325077] env[69994]: DEBUG oslo_concurrency.lockutils [req-8471f2c8-45dc-4085-a47e-5d925f348755 req-c85c458d-cd10-4d89-8c8d-b3548e04dffd service nova] Acquiring lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.325113] env[69994]: DEBUG oslo_concurrency.lockutils [req-8471f2c8-45dc-4085-a47e-5d925f348755 req-c85c458d-cd10-4d89-8c8d-b3548e04dffd service nova] Acquired lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.325841] env[69994]: DEBUG nova.network.neutron [req-8471f2c8-45dc-4085-a47e-5d925f348755 req-c85c458d-cd10-4d89-8c8d-b3548e04dffd service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Refreshing network info cache for port cf663439-5f58-4ebe-9323-5937dcc425a7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.351912] env[69994]: DEBUG oslo_vmware.api [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925598, 'name': PowerOffVM_Task, 'duration_secs': 0.211949} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.352267] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 883.352417] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 883.352681] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b5247b6-3102-4800-9229-9a60b03130fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.403520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "d28a6129-1bfe-40da-bc91-c68cf874aa36" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.403689] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "d28a6129-1bfe-40da-bc91-c68cf874aa36" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.424139] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 
tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 883.424393] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 883.424594] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Deleting the datastore file [datastore2] 9b6aca3c-337b-4067-80e0-487d956fabc7 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 883.424887] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c6fa11d-9823-4a67-90c6-0ec28e13150b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.431836] env[69994]: DEBUG oslo_vmware.api [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for the task: (returnval){ [ 883.431836] env[69994]: value = "task-2925601" [ 883.431836] env[69994]: _type = "Task" [ 883.431836] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.443989] env[69994]: DEBUG oslo_vmware.api [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925601, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.629041] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925599, 'name': CreateVM_Task, 'duration_secs': 0.431315} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.629245] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 883.629988] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.630177] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.630503] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 883.630898] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6f7a63c-e65b-4105-82ef-b095afecaf23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.635625] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 883.635625] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5294fa70-72f8-a789-7013-095ebec783c2" [ 883.635625] env[69994]: _type = "Task" [ 883.635625] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.646462] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5294fa70-72f8-a789-7013-095ebec783c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.905623] env[69994]: DEBUG nova.compute.manager [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 883.947953] env[69994]: DEBUG oslo_vmware.api [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Task: {'id': task-2925601, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226522} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.948316] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 883.951020] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 883.951020] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 883.951020] env[69994]: INFO nova.compute.manager [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 883.951020] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 883.951020] env[69994]: DEBUG nova.compute.manager [-] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 883.951020] env[69994]: DEBUG nova.network.neutron [-] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 884.149129] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5294fa70-72f8-a789-7013-095ebec783c2, 'name': SearchDatastore_Task, 'duration_secs': 0.014466} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.151744] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.152041] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.152297] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.152442] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.152616] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 884.153140] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2351f8b5-74ae-4a16-b6cf-b96e91a78394 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.164574] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 884.164842] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 884.165754] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8966d479-e711-4c0b-be1e-310ee70fae8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.174205] env[69994]: DEBUG nova.network.neutron [req-8471f2c8-45dc-4085-a47e-5d925f348755 req-c85c458d-cd10-4d89-8c8d-b3548e04dffd service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updated VIF entry in instance network info cache for port cf663439-5f58-4ebe-9323-5937dcc425a7. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 884.175558] env[69994]: DEBUG nova.network.neutron [req-8471f2c8-45dc-4085-a47e-5d925f348755 req-c85c458d-cd10-4d89-8c8d-b3548e04dffd service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updating instance_info_cache with network_info: [{"id": "cf663439-5f58-4ebe-9323-5937dcc425a7", "address": "fa:16:3e:0e:91:27", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf663439-5f", "ovs_interfaceid": "cf663439-5f58-4ebe-9323-5937dcc425a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.178311] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 884.178311] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520a1106-eaad-e8d3-16dc-fe4397cd366d" [ 884.178311] env[69994]: _type = "Task" [ 884.178311] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.188927] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520a1106-eaad-e8d3-16dc-fe4397cd366d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.217445] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b5a201-696b-434b-9307-efac117bf981 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.227317] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0faee103-ce7c-46cf-a4b2-d7909c68b3c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.263360] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c2ab1e-0f1c-4aad-9555-d4f027403bc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.275426] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21c71f0-c94c-4320-afc1-afe80e8ff1d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.290894] env[69994]: DEBUG nova.compute.provider_tree [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.292501] env[69994]: DEBUG nova.network.neutron [-] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.321054] env[69994]: DEBUG nova.compute.manager [req-e3564f60-974e-4063-a739-5bd46fe76af7 req-7d34b3c3-2028-4e95-9a3e-a34ec51e4737 service nova] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Received event network-vif-deleted-9cd8099f-e38f-4f62-a5f8-b0edfafa2cb1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 884.432784] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.683215] env[69994]: DEBUG oslo_concurrency.lockutils [req-8471f2c8-45dc-4085-a47e-5d925f348755 req-c85c458d-cd10-4d89-8c8d-b3548e04dffd service nova] Releasing lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.689215] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520a1106-eaad-e8d3-16dc-fe4397cd366d, 'name': SearchDatastore_Task, 'duration_secs': 0.017167} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.690186] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51af294c-c142-479d-904b-1377c85f2c72 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.695949] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 884.695949] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c5277c-3f28-8173-0461-e00b8c3a1636" [ 884.695949] env[69994]: _type = "Task" [ 884.695949] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.704219] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c5277c-3f28-8173-0461-e00b8c3a1636, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.798044] env[69994]: DEBUG nova.scheduler.client.report [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 884.800431] env[69994]: INFO nova.compute.manager [-] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Took 0.85 seconds to deallocate network for instance. [ 885.209026] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c5277c-3f28-8173-0461-e00b8c3a1636, 'name': SearchDatastore_Task, 'duration_secs': 0.016605} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.209026] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.209026] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] d5af7ae1-d68e-4170-b762-e56d7f2551d7/d5af7ae1-d68e-4170-b762-e56d7f2551d7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 885.209026] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2f3989e-6688-4e9f-99aa-33b5968327a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.218039] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 885.218039] env[69994]: value = "task-2925602" [ 885.218039] env[69994]: _type = "Task" [ 885.218039] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.223981] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925602, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.306618] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.729835] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925602, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.808229] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.035s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.821033] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.691s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.822846] env[69994]: INFO nova.compute.claims [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 886.228092] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925602, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.614392} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.228528] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] d5af7ae1-d68e-4170-b762-e56d7f2551d7/d5af7ae1-d68e-4170-b762-e56d7f2551d7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 886.228602] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.228985] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb29af25-25bb-4b14-be55-90be4ae487fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.235146] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 886.235146] env[69994]: value = "task-2925603" [ 886.235146] env[69994]: _type = "Task" [ 886.235146] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.245294] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925603, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.633631] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5278cf1c-e53a-2378-373f-e1b0ce4a9d3e/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 886.634111] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb892e0c-3412-4898-9dbd-b7ae70ae7e8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.640673] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5278cf1c-e53a-2378-373f-e1b0ce4a9d3e/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 886.640841] env[69994]: ERROR oslo_vmware.rw_handles [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5278cf1c-e53a-2378-373f-e1b0ce4a9d3e/disk-0.vmdk due to incomplete transfer. [ 886.641086] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f0e8f404-2a5a-462e-995f-141b0b93cb71 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.649223] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5278cf1c-e53a-2378-373f-e1b0ce4a9d3e/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 886.649426] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Uploaded image 9630d505-0b21-4142-8042-3205fbfedde4 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 886.651678] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 886.651940] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f047c5ce-1cef-4a6b-a2bd-3c134ce3c5ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.658204] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 886.658204] env[69994]: value = "task-2925604" [ 886.658204] env[69994]: _type = "Task" [ 886.658204] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.668076] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925604, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.747259] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925603, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07356} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.747510] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 886.748343] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1b28f1-e5ad-4963-9969-094b78865def {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.772783] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] d5af7ae1-d68e-4170-b762-e56d7f2551d7/d5af7ae1-d68e-4170-b762-e56d7f2551d7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 886.772923] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cee1be3-605f-4f1e-98fb-bf315521858a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.792536] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 886.792536] env[69994]: value = "task-2925605" [ 886.792536] env[69994]: _type = "Task" [ 886.792536] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.803074] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925605, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.167927] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925604, 'name': Destroy_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.306953] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925605, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.313956] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b71d87a-a39e-40bf-b046-97cf41b13d21 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.323335] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f792cc3f-755c-4ac6-a786-e471fe243047 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.357733] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f271194a-21e4-47be-b53c-d584361981f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.365481] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683119fd-6330-4521-9b94-e4e564312d70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.373384] env[69994]: INFO nova.compute.manager [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Swapping old allocation on dict_keys(['2173cd1f-90eb-4aab-b51d-83c140d1a7be']) held by migration 18246889-8825-4ae1-9687-068788eac036 for instance [ 887.384798] env[69994]: DEBUG nova.compute.provider_tree [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.400509] env[69994]: DEBUG nova.scheduler.client.report [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Overwriting current allocation {'allocations': {'2173cd1f-90eb-4aab-b51d-83c140d1a7be': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 85}}, 'project_id': '5642969c42ae403cbfb4d5989e399f8d', 'user_id': 'fc0799f063d84f6aa0953ecb32f106ee', 'consumer_generation': 1} on consumer f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8 {{(pid=69994) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 887.496698] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.496905] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquired lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.497094] env[69994]: DEBUG nova.network.neutron [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 
f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 887.670367] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925604, 'name': Destroy_Task, 'duration_secs': 0.613262} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.670721] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Destroyed the VM [ 887.670978] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 887.671302] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-17930c52-0e18-4eb4-ad68-315142fa34fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.678067] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 887.678067] env[69994]: value = "task-2925606" [ 887.678067] env[69994]: _type = "Task" [ 887.678067] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.689017] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925606, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.802930] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925605, 'name': ReconfigVM_Task, 'duration_secs': 0.991887} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.802930] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Reconfigured VM instance instance-0000003a to attach disk [datastore1] d5af7ae1-d68e-4170-b762-e56d7f2551d7/d5af7ae1-d68e-4170-b762-e56d7f2551d7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.803104] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-120d9d23-e23b-4d0a-a00b-b117979843de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.808561] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 887.808561] env[69994]: value = "task-2925607" [ 887.808561] env[69994]: _type = "Task" [ 887.808561] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.820628] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925607, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.890025] env[69994]: DEBUG nova.scheduler.client.report [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 888.194271] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925606, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.259322] env[69994]: DEBUG nova.network.neutron [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance_info_cache with network_info: [{"id": "885142d2-3a31-487c-b773-a0b0df2e4e40", "address": "fa:16:3e:76:b6:04", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap885142d2-3a", "ovs_interfaceid": "885142d2-3a31-487c-b773-a0b0df2e4e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.299470] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "8001cb13-6a52-451b-b4b6-57b893975079" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.299597] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "8001cb13-6a52-451b-b4b6-57b893975079" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.319138] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925607, 'name': Rename_Task, 'duration_secs': 0.220808} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.319367] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.319752] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03c71312-0e24-4365-883c-bb62ecb767bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.325601] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 888.325601] env[69994]: value = "task-2925608" [ 888.325601] env[69994]: _type = "Task" [ 888.325601] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.333491] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925608, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.393734] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.394333] env[69994]: DEBUG nova.compute.manager [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 888.397141] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.759s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.398939] env[69994]: INFO nova.compute.claims [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 888.688521] env[69994]: DEBUG oslo_vmware.api [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925606, 'name': RemoveSnapshot_Task, 'duration_secs': 0.515701} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.688852] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 888.689113] env[69994]: INFO nova.compute.manager [None req-c58aa5a5-dbd9-4d4c-b667-28590bc2eedb tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Took 13.45 seconds to snapshot the instance on the hypervisor. [ 888.762374] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Releasing lock "refresh_cache-f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.762860] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 888.763152] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e8d47e5-8763-4a61-93d5-8b643ec57fb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.771107] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 888.771107] env[69994]: value = "task-2925609" [ 888.771107] env[69994]: _type = "Task" [ 888.771107] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.779779] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925609, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.801880] env[69994]: DEBUG nova.compute.manager [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 888.835345] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925608, 'name': PowerOnVM_Task} progress is 81%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.903831] env[69994]: DEBUG nova.compute.utils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 888.908262] env[69994]: DEBUG nova.compute.manager [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 888.908345] env[69994]: DEBUG nova.network.neutron [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 888.975656] env[69994]: DEBUG nova.policy [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '384fc017e6c243c9b5f7f396aa8028ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '891cfe67dd0044f3920402752215e361', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 889.282034] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925609, 'name': PowerOffVM_Task, 'duration_secs': 0.190821} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.282346] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 889.283009] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:32:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='231a442d-6575-43ed-8970-683d59890f06',id=27,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1776927979',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 889.283226] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 889.283379] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 889.283559] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 889.283896] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 889.283896] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 889.284105] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 889.284266] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 
tempest-MigrationsAdminTest-1712202806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 889.284433] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 889.284593] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 889.284764] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 889.289879] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62f7bded-cad8-4523-8f21-4b66b38dcf13 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.304145] env[69994]: DEBUG nova.network.neutron [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Successfully created port: 2c8ad636-498e-4d08-8915-5d11ff684a84 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 889.309894] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 889.309894] env[69994]: value = "task-2925610" [ 889.309894] env[69994]: _type = "Task" [ 889.309894] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.322350] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925610, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.331914] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.335751] env[69994]: DEBUG oslo_vmware.api [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2925608, 'name': PowerOnVM_Task, 'duration_secs': 0.738704} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.336054] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 889.336270] env[69994]: INFO nova.compute.manager [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Took 8.68 seconds to spawn the instance on the hypervisor. [ 889.336452] env[69994]: DEBUG nova.compute.manager [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 889.337434] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6506c771-19e2-4c4e-a583-9b477a2f8640 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.415745] env[69994]: DEBUG nova.compute.manager [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 889.783437] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f94cdd1-2515-4df9-a29e-00896e50b621 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.790717] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f67608-ee68-4a1b-be1f-d2eb014f5b03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.825021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97688eaf-c0aa-49b3-8edb-792cef4367a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.832686] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925610, 'name': ReconfigVM_Task, 'duration_secs': 0.340977} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.835326] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a89d00-36f8-4d41-bae8-a21d6e261a2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.838683] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05d82fb-52e9-47b0-b7f9-e91d2cdbfd63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.857011] env[69994]: DEBUG nova.compute.provider_tree [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.874680] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:32:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='231a442d-6575-43ed-8970-683d59890f06',id=27,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1776927979',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 889.874918] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 889.875087] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 889.875271] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 889.875438] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 889.875598] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 889.875803] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 889.875960] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 889.876141] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 889.876308] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 889.876481] env[69994]: DEBUG nova.virt.hardware [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 889.878498] env[69994]: DEBUG nova.scheduler.client.report [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 889.881712] env[69994]: INFO nova.compute.manager [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Took 46.43 seconds to build instance. [ 889.882473] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6435207-7435-4b9a-8b37-a6cc50a93297 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.889941] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 889.889941] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5220df42-08a5-83d8-76a0-8a8c8decf6c1" [ 889.889941] env[69994]: _type = "Task" [ 889.889941] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.899019] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5220df42-08a5-83d8-76a0-8a8c8decf6c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.386406] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.989s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.386988] env[69994]: DEBUG nova.compute.manager [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 890.389634] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56eada9-2c4b-4255-80fa-e1cbcf8c2487 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.946s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.392615] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.439s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.392615] env[69994]: INFO nova.compute.claims [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.403537] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5220df42-08a5-83d8-76a0-8a8c8decf6c1, 'name': SearchDatastore_Task, 'duration_secs': 0.010103} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.409063] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Reconfiguring VM instance instance-0000002b to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 890.409361] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7a3b755-bbf7-49ca-8e5a-c2abb3dff119 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.427126] env[69994]: DEBUG nova.compute.manager [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 890.431551] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 890.431551] env[69994]: value = "task-2925611" [ 890.431551] env[69994]: _type = "Task" [ 890.431551] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.442448] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925611, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.461889] env[69994]: DEBUG nova.virt.hardware [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 890.461889] env[69994]: DEBUG nova.virt.hardware [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 890.461889] env[69994]: DEBUG nova.virt.hardware [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 890.461889] env[69994]: DEBUG nova.virt.hardware [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 890.461889] env[69994]: DEBUG nova.virt.hardware [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 890.461889] env[69994]: DEBUG nova.virt.hardware [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 890.461889] env[69994]: DEBUG nova.virt.hardware [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 890.461889] env[69994]: DEBUG nova.virt.hardware [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 890.461889] env[69994]: DEBUG 
nova.virt.hardware [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 890.462216] env[69994]: DEBUG nova.virt.hardware [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 890.462216] env[69994]: DEBUG nova.virt.hardware [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 890.463278] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14180f10-77da-432c-8d88-a9f60f081db6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.471608] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d132697-448c-4869-b925-f1a916e96b6e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.582174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Acquiring lock "9d146d57-9948-4b18-a3f3-675b53d137ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.582174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Lock "9d146d57-9948-4b18-a3f3-675b53d137ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.582174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Acquiring lock "9d146d57-9948-4b18-a3f3-675b53d137ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.582174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Lock "9d146d57-9948-4b18-a3f3-675b53d137ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.582174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 
tempest-ImagesOneServerTestJSON-488467358-project-member] Lock "9d146d57-9948-4b18-a3f3-675b53d137ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.583915] env[69994]: INFO nova.compute.manager [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Terminating instance [ 890.649389] env[69994]: DEBUG nova.compute.manager [req-9c147034-fae6-433f-ad56-1ced995a8286 req-26109eb9-155e-4d82-8f02-22114fccc325 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Received event network-changed-cf663439-5f58-4ebe-9323-5937dcc425a7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 890.651600] env[69994]: DEBUG nova.compute.manager [req-9c147034-fae6-433f-ad56-1ced995a8286 req-26109eb9-155e-4d82-8f02-22114fccc325 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Refreshing instance network info cache due to event network-changed-cf663439-5f58-4ebe-9323-5937dcc425a7. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 890.651600] env[69994]: DEBUG oslo_concurrency.lockutils [req-9c147034-fae6-433f-ad56-1ced995a8286 req-26109eb9-155e-4d82-8f02-22114fccc325 service nova] Acquiring lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.651600] env[69994]: DEBUG oslo_concurrency.lockutils [req-9c147034-fae6-433f-ad56-1ced995a8286 req-26109eb9-155e-4d82-8f02-22114fccc325 service nova] Acquired lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.651600] env[69994]: DEBUG nova.network.neutron [req-9c147034-fae6-433f-ad56-1ced995a8286 req-26109eb9-155e-4d82-8f02-22114fccc325 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Refreshing network info cache for port cf663439-5f58-4ebe-9323-5937dcc425a7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.897820] env[69994]: DEBUG nova.compute.utils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 890.902212] env[69994]: DEBUG nova.compute.manager [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 890.902398] env[69994]: DEBUG nova.network.neutron [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 890.922030] env[69994]: DEBUG nova.network.neutron [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Successfully updated port: 2c8ad636-498e-4d08-8915-5d11ff684a84 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 890.942915] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925611, 'name': ReconfigVM_Task, 'duration_secs': 0.351085} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.944323] env[69994]: DEBUG nova.policy [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c05ace0df7fe4a72bb3045dcb50fdfe2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a9a6d7e114941d5a384d9907b491335', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 890.945805] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Reconfigured VM instance instance-0000002b to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 890.946620] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292d6684-5067-41ad-903a-818ae31bb5f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.968982] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8/f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 890.969270] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62157fc2-d5ac-49b8-aceb-18dd97743b4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.988248] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c 
tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 890.988248] env[69994]: value = "task-2925612" [ 890.988248] env[69994]: _type = "Task" [ 890.988248] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.997460] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925612, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.086896] env[69994]: DEBUG nova.compute.manager [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 891.086896] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 891.087760] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d2a247-830d-4bba-a59c-e16b5e0f2525 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.095975] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 891.096282] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa36f6ed-ab32-4bfb-a4f8-fd09e7c24c83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.102298] env[69994]: DEBUG oslo_vmware.api [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 891.102298] env[69994]: value = "task-2925613" [ 891.102298] env[69994]: _type = "Task" [ 891.102298] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.110371] env[69994]: DEBUG oslo_vmware.api [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925613, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.220733] env[69994]: DEBUG nova.network.neutron [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Successfully created port: aabfeee4-193c-4644-afc0-35960f4acff8 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 891.406404] env[69994]: DEBUG nova.compute.manager [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 891.427273] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "refresh_cache-565066c4-2f33-44c6-8e82-4c6d729cd0b7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.427492] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquired lock "refresh_cache-565066c4-2f33-44c6-8e82-4c6d729cd0b7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.427726] env[69994]: DEBUG nova.network.neutron [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 891.439569] env[69994]: DEBUG nova.network.neutron [req-9c147034-fae6-433f-ad56-1ced995a8286 req-26109eb9-155e-4d82-8f02-22114fccc325 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updated VIF entry in instance network info cache for port cf663439-5f58-4ebe-9323-5937dcc425a7. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.439793] env[69994]: DEBUG nova.network.neutron [req-9c147034-fae6-433f-ad56-1ced995a8286 req-26109eb9-155e-4d82-8f02-22114fccc325 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updating instance_info_cache with network_info: [{"id": "cf663439-5f58-4ebe-9323-5937dcc425a7", "address": "fa:16:3e:0e:91:27", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf663439-5f", "ovs_interfaceid": "cf663439-5f58-4ebe-9323-5937dcc425a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.500657] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925612, 'name': ReconfigVM_Task, 'duration_secs': 0.278466} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.501675] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Reconfigured VM instance instance-0000002b to attach disk [datastore2] f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8/f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.501792] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b86e46-6b8a-4a35-91f0-88be30506ffb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.530329] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11b4099-6847-44d9-9191-83d0ef17f27a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.552203] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9640094b-1f3f-48f3-88b7-151e1dd09582 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.575687] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b976ddb9-4e9b-4179-95c0-48683d2bbc9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.582774] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 891.583058] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6cfb45e-b2fa-454e-abce-657aa71de55e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.589330] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 891.589330] env[69994]: value = "task-2925614" [ 891.589330] env[69994]: _type = "Task" [ 891.589330] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.599442] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925614, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.613700] env[69994]: DEBUG oslo_vmware.api [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925613, 'name': PowerOffVM_Task, 'duration_secs': 0.185123} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.613963] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 891.614148] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 891.614403] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52fbdfee-2a00-4a54-8f97-060005c1f1d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.674137] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 891.674137] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 891.674137] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Deleting the datastore file [datastore1] 9d146d57-9948-4b18-a3f3-675b53d137ed {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.678129] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c2deb37-6dcb-404d-a773-2344af1ce0c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.685396] env[69994]: DEBUG oslo_vmware.api [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for the task: (returnval){ [ 891.685396] env[69994]: value = "task-2925616" [ 891.685396] env[69994]: _type = "Task" [ 891.685396] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.693077] env[69994]: DEBUG oslo_vmware.api [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925616, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.841008] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ee2479-eb36-403c-b531-1f346113a049 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.848537] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9ee9ee-a0a7-438c-a91c-a03af0ff5828 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.878602] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a8343e-250d-4f66-bca8-d835b0f822ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.885633] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a713e4c4-e6b9-42ee-868e-24768d9e9dc3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.898851] env[69994]: DEBUG nova.compute.provider_tree [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 891.942726] env[69994]: DEBUG oslo_concurrency.lockutils [req-9c147034-fae6-433f-ad56-1ced995a8286 req-26109eb9-155e-4d82-8f02-22114fccc325 service nova] Releasing lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.961886] env[69994]: DEBUG nova.network.neutron [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 892.098779] env[69994]: DEBUG oslo_vmware.api [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925614, 'name': PowerOnVM_Task, 'duration_secs': 0.430973} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.099171] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 892.102977] env[69994]: DEBUG nova.network.neutron [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Updating instance_info_cache with network_info: [{"id": "2c8ad636-498e-4d08-8915-5d11ff684a84", "address": "fa:16:3e:95:ea:01", "network": {"id": "e62ff410-9189-4ff0-98cd-b044bdf4b4b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-758708818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "891cfe67dd0044f3920402752215e361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c8ad636-49", "ovs_interfaceid": "2c8ad636-498e-4d08-8915-5d11ff684a84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.195134] env[69994]: DEBUG oslo_vmware.api [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Task: {'id': task-2925616, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178846} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.195345] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.195528] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 892.195738] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 892.195936] env[69994]: INFO nova.compute.manager [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Took 1.11 seconds to destroy the instance on the hypervisor. [ 892.196200] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 892.196394] env[69994]: DEBUG nova.compute.manager [-] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 892.196493] env[69994]: DEBUG nova.network.neutron [-] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 892.417801] env[69994]: DEBUG nova.compute.manager [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 892.425951] env[69994]: ERROR nova.scheduler.client.report [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [req-d8754a5f-fc0e-486c-9eef-b7feee3e1755] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d8754a5f-fc0e-486c-9eef-b7feee3e1755"}]} [ 892.443709] env[69994]: DEBUG nova.virt.hardware [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 892.444118] env[69994]: DEBUG nova.virt.hardware [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 892.444296] env[69994]: DEBUG nova.virt.hardware [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 892.444506] env[69994]: DEBUG nova.virt.hardware [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 892.444629] env[69994]: DEBUG nova.virt.hardware [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 892.444815] env[69994]: DEBUG nova.virt.hardware [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 892.445050] env[69994]: DEBUG nova.virt.hardware [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 892.445412] env[69994]: DEBUG nova.virt.hardware [None 
req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 892.445412] env[69994]: DEBUG nova.virt.hardware [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 892.445622] env[69994]: DEBUG nova.virt.hardware [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 892.445822] env[69994]: DEBUG nova.virt.hardware [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 892.446730] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd3682c-7509-4368-b121-5f1f017a219f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.450390] env[69994]: DEBUG nova.scheduler.client.report [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 892.458440] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbfae2c-79e4-45af-9c4b-337a3536cdd5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.473698] env[69994]: DEBUG nova.scheduler.client.report [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 892.473698] env[69994]: DEBUG nova.compute.provider_tree [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 892.485839] env[69994]: DEBUG nova.scheduler.client.report [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 892.504118] env[69994]: DEBUG nova.scheduler.client.report [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 892.614835] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Releasing lock "refresh_cache-565066c4-2f33-44c6-8e82-4c6d729cd0b7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.615340] env[69994]: DEBUG nova.compute.manager [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Instance network_info: |[{"id": "2c8ad636-498e-4d08-8915-5d11ff684a84", "address": "fa:16:3e:95:ea:01", "network": {"id": "e62ff410-9189-4ff0-98cd-b044bdf4b4b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-758708818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "891cfe67dd0044f3920402752215e361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c8ad636-49", "ovs_interfaceid": "2c8ad636-498e-4d08-8915-5d11ff684a84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 892.616741] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:ea:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00a15667-7ca5-4dc9-be92-164750d87988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'2c8ad636-498e-4d08-8915-5d11ff684a84', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 892.629968] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Creating folder: Project (891cfe67dd0044f3920402752215e361). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 892.634208] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4ab91ab-8fd3-4257-bbc6-63d677c5f92c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.649199] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Created folder: Project (891cfe67dd0044f3920402752215e361) in parent group-v587342. [ 892.649199] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Creating folder: Instances. Parent ref: group-v587507. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 892.649986] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-906a8f89-9715-4c29-a765-87c8d41b549a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.659352] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Created folder: Instances in parent group-v587507. [ 892.659605] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 892.659833] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 892.660063] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94abf838-671d-4811-b0e8-d795c45a5e4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.684970] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 892.684970] env[69994]: value = "task-2925619" [ 892.684970] env[69994]: _type = "Task" [ 892.684970] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.693580] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925619, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.735367] env[69994]: DEBUG nova.compute.manager [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Received event network-vif-plugged-2c8ad636-498e-4d08-8915-5d11ff684a84 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 892.735367] env[69994]: DEBUG oslo_concurrency.lockutils [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] Acquiring lock "565066c4-2f33-44c6-8e82-4c6d729cd0b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.735367] env[69994]: DEBUG oslo_concurrency.lockutils [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] Lock "565066c4-2f33-44c6-8e82-4c6d729cd0b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.735367] env[69994]: DEBUG oslo_concurrency.lockutils [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] Lock "565066c4-2f33-44c6-8e82-4c6d729cd0b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.735804] env[69994]: DEBUG nova.compute.manager [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] No waiting events found dispatching network-vif-plugged-2c8ad636-498e-4d08-8915-5d11ff684a84 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 892.736192] env[69994]: WARNING nova.compute.manager [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Received unexpected event network-vif-plugged-2c8ad636-498e-4d08-8915-5d11ff684a84 for instance with vm_state building and task_state spawning. [ 892.736491] env[69994]: DEBUG nova.compute.manager [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Received event network-changed-2c8ad636-498e-4d08-8915-5d11ff684a84 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 892.736758] env[69994]: DEBUG nova.compute.manager [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Refreshing instance network info cache due to event network-changed-2c8ad636-498e-4d08-8915-5d11ff684a84. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 892.737076] env[69994]: DEBUG oslo_concurrency.lockutils [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] Acquiring lock "refresh_cache-565066c4-2f33-44c6-8e82-4c6d729cd0b7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.737452] env[69994]: DEBUG oslo_concurrency.lockutils [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] Acquired lock "refresh_cache-565066c4-2f33-44c6-8e82-4c6d729cd0b7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.737847] env[69994]: DEBUG nova.network.neutron [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Refreshing network info cache for port 2c8ad636-498e-4d08-8915-5d11ff684a84 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 892.870722] env[69994]: DEBUG nova.compute.manager [req-f3a37600-e463-4136-9f66-6e7baed83325 req-4903f31e-9a5b-4ac3-af05-aacf663d3cc5 service nova] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Received event network-vif-plugged-aabfeee4-193c-4644-afc0-35960f4acff8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 892.870938] env[69994]: DEBUG oslo_concurrency.lockutils [req-f3a37600-e463-4136-9f66-6e7baed83325 req-4903f31e-9a5b-4ac3-af05-aacf663d3cc5 service nova] Acquiring lock "b80a405e-a02e-4b18-a325-753146533d1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.871154] env[69994]: DEBUG oslo_concurrency.lockutils [req-f3a37600-e463-4136-9f66-6e7baed83325 req-4903f31e-9a5b-4ac3-af05-aacf663d3cc5 service nova] Lock "b80a405e-a02e-4b18-a325-753146533d1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.871321] env[69994]: DEBUG oslo_concurrency.lockutils [req-f3a37600-e463-4136-9f66-6e7baed83325 req-4903f31e-9a5b-4ac3-af05-aacf663d3cc5 service nova] Lock "b80a405e-a02e-4b18-a325-753146533d1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.871514] env[69994]: DEBUG nova.compute.manager [req-f3a37600-e463-4136-9f66-6e7baed83325 req-4903f31e-9a5b-4ac3-af05-aacf663d3cc5 service nova] [instance: b80a405e-a02e-4b18-a325-753146533d1b] No waiting events found dispatching network-vif-plugged-aabfeee4-193c-4644-afc0-35960f4acff8 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 892.871680] env[69994]: WARNING nova.compute.manager [req-f3a37600-e463-4136-9f66-6e7baed83325 req-4903f31e-9a5b-4ac3-af05-aacf663d3cc5 service nova] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Received unexpected event network-vif-plugged-aabfeee4-193c-4644-afc0-35960f4acff8 for instance with vm_state building and task_state spawning. 
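The lock chatter in the records above comes from two different oslo.concurrency entry points: the @lockutils.synchronized(...) decorator, whose wrapper is the "inner" function logged at lockutils.py:405/410/424 (for example the "565066c4-...-events" lock taken around _pop_event), and the lockutils.lock(...) context manager, logged at lockutils.py:313/316/334 (for example the "refresh_cache-..." locks). A minimal sketch of both patterns follows; only the oslo.concurrency calls are real API, while the function names and bodies are illustrative placeholders rather than Nova's actual code.

from oslo_concurrency import lockutils

# Decorator form: serializes per-instance external-event bookkeeping and is what
# produces the 'Acquiring lock "<uuid>-events" by "..._pop_event" / acquired /
# "released"' lines above (the logged "inner" is the decorator's wrapper function).
@lockutils.synchronized('565066c4-2f33-44c6-8e82-4c6d729cd0b7-events')
def _pop_event():
    # ... look up and remove any waiter registered for the incoming event ...
    pass

# Context-manager form: guards the instance network info cache refresh and is what
# produces the 'Acquiring lock "refresh_cache-<uuid>" / Acquired lock / Releasing
# lock' lines above.
def refresh_network_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # ... rebuild instance_info_cache from Neutron for the changed port ...
        pass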
[ 892.910947] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4105284e-0c62-4b7a-a0d9-dddaf09110fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.917715] env[69994]: DEBUG nova.network.neutron [-] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.919636] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13bdd206-81a7-44f1-83e8-7df74e670254 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.926852] env[69994]: DEBUG nova.network.neutron [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Successfully updated port: aabfeee4-193c-4644-afc0-35960f4acff8 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 892.959176] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2fd1f7-139a-4ff2-bccf-36346be03bcb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.967346] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6674e796-acc1-45b0-9968-b1cbc51a21ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.981367] env[69994]: DEBUG nova.compute.provider_tree [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 893.118055] env[69994]: INFO nova.compute.manager [None req-f89eacfd-b46d-40ef-ab3c-7a4d26f5890c tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance to original state: 'active' [ 893.195144] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925619, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.429440] env[69994]: INFO nova.compute.manager [-] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Took 1.23 seconds to deallocate network for instance. 
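The Placement exchange threaded through the surrounding records is a generation-conflict retry: the first inventory PUT for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be came back 409 with code placement.concurrent_update, the report client then refreshed inventories, aggregates and traits, and the retried update lands just below with the provider generation moving from 87 to 88. A hedged sketch of that read-modify-retry loop against the Placement HTTP API follows; the endpoint URL, headers and retry policy are simplified stand-ins for what nova.scheduler.client.report actually does, and authentication is omitted entirely.

import requests

PLACEMENT = 'http://placement.example.test'  # illustrative endpoint, not from the log
HEADERS = {'OpenStack-API-Version': 'placement 1.26'}  # a real auth token would be sent too

def set_inventory(rp_uuid, inventories, max_attempts=4):
    """PUT the desired inventory, re-reading the provider generation on each attempt."""
    for _ in range(max_attempts):
        # Re-read so the write carries the provider's current generation.
        cur = requests.get(
            f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories',
            headers=HEADERS).json()
        body = {'resource_provider_generation': cur['resource_provider_generation'],
                'inventories': inventories}
        resp = requests.put(
            f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories',
            json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()  # success: the generation has been bumped (e.g. 87 -> 88)
        # 409 placement.concurrent_update: another writer bumped the generation between
        # our read and our write; loop and retry with the fresh value.
    raise RuntimeError(f'could not update inventory for provider {rp_uuid}')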
[ 893.431531] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "refresh_cache-b80a405e-a02e-4b18-a325-753146533d1b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.431804] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquired lock "refresh_cache-b80a405e-a02e-4b18-a325-753146533d1b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.431954] env[69994]: DEBUG nova.network.neutron [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 893.436461] env[69994]: DEBUG nova.network.neutron [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Updated VIF entry in instance network info cache for port 2c8ad636-498e-4d08-8915-5d11ff684a84. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 893.436833] env[69994]: DEBUG nova.network.neutron [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Updating instance_info_cache with network_info: [{"id": "2c8ad636-498e-4d08-8915-5d11ff684a84", "address": "fa:16:3e:95:ea:01", "network": {"id": "e62ff410-9189-4ff0-98cd-b044bdf4b4b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-758708818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "891cfe67dd0044f3920402752215e361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c8ad636-49", "ovs_interfaceid": "2c8ad636-498e-4d08-8915-5d11ff684a84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.513430] env[69994]: DEBUG nova.scheduler.client.report [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 87 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 893.513709] env[69994]: DEBUG nova.compute.provider_tree [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 87 to 88 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 893.513899] env[69994]: DEBUG nova.compute.provider_tree [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 893.695777] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925619, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.939602] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.940403] env[69994]: DEBUG oslo_concurrency.lockutils [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] Releasing lock "refresh_cache-565066c4-2f33-44c6-8e82-4c6d729cd0b7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.940494] env[69994]: DEBUG nova.compute.manager [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Received event network-vif-deleted-7058cb7d-792e-4141-9145-c7cfde6b5700 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 893.941227] env[69994]: INFO nova.compute.manager [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Neutron deleted interface 7058cb7d-792e-4141-9145-c7cfde6b5700; detaching it from the instance and deleting it from the info cache [ 893.941227] env[69994]: DEBUG nova.network.neutron [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.981760] env[69994]: DEBUG nova.network.neutron [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 
tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 894.019936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.630s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.020505] env[69994]: DEBUG nova.compute.manager [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 894.027520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.046s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.029020] env[69994]: INFO nova.compute.claims [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.151160] env[69994]: DEBUG nova.network.neutron [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Updating instance_info_cache with network_info: [{"id": "aabfeee4-193c-4644-afc0-35960f4acff8", "address": "fa:16:3e:02:33:10", "network": {"id": "0d173c7c-24a5-47c8-b6e4-716b3226c39b", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1739066242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a9a6d7e114941d5a384d9907b491335", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaabfeee4-19", "ovs_interfaceid": "aabfeee4-193c-4644-afc0-35960f4acff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.197322] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925619, 'name': 
CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.443277] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59e02329-0f1c-4d0f-ac52-bb627c21aca5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.453708] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d5cadb-6716-4c8f-9abe-8ce9e1085490 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.487107] env[69994]: DEBUG nova.compute.manager [req-56a81c0a-c939-4144-87f1-f5866f8a67b7 req-a6289ed8-9b56-47cd-96d9-e2f13004f709 service nova] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Detach interface failed, port_id=7058cb7d-792e-4141-9145-c7cfde6b5700, reason: Instance 9d146d57-9948-4b18-a3f3-675b53d137ed could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 894.529647] env[69994]: DEBUG nova.compute.utils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 894.532041] env[69994]: DEBUG nova.compute.manager [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 894.532242] env[69994]: DEBUG nova.network.neutron [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 894.576938] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.577236] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.577514] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.577736] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa 
tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.577909] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.580927] env[69994]: DEBUG nova.policy [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '550fe2bfeab14f0fa409c65d98954e7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21bf4c6f3b2c45218949b0e6c1eb84fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 894.582597] env[69994]: INFO nova.compute.manager [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Terminating instance [ 894.654467] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Releasing lock "refresh_cache-b80a405e-a02e-4b18-a325-753146533d1b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.654827] env[69994]: DEBUG nova.compute.manager [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Instance network_info: |[{"id": "aabfeee4-193c-4644-afc0-35960f4acff8", "address": "fa:16:3e:02:33:10", "network": {"id": "0d173c7c-24a5-47c8-b6e4-716b3226c39b", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1739066242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a9a6d7e114941d5a384d9907b491335", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaabfeee4-19", "ovs_interfaceid": "aabfeee4-193c-4644-afc0-35960f4acff8", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 894.655343] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:33:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aabfeee4-193c-4644-afc0-35960f4acff8', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 894.662964] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 894.663503] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 894.663739] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe41a402-9feb-46a9-a198-da81e377f587 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.684454] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 894.684454] env[69994]: value = "task-2925620" [ 894.684454] env[69994]: _type = "Task" [ 894.684454] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.693357] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925620, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.697942] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925619, 'name': CreateVM_Task, 'duration_secs': 1.742364} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.698114] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 894.698806] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.698983] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.699321] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 894.699575] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f9bfed9-a512-4852-8dde-76cd6e5970a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.704159] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 894.704159] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d22585-19ba-9ae9-cbc5-6f9f29b3cdf3" [ 894.704159] env[69994]: _type = "Task" [ 894.704159] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.711731] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d22585-19ba-9ae9-cbc5-6f9f29b3cdf3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.876146] env[69994]: DEBUG nova.network.neutron [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Successfully created port: 02c12be4-6c2c-415c-bbf4-af57f2c1bb6f {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 894.903821] env[69994]: DEBUG nova.compute.manager [req-3506ab62-7e2f-438e-9c02-c32cd93b11f7 req-f2c2b696-686b-46d6-b490-457ea9d715d8 service nova] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Received event network-changed-aabfeee4-193c-4644-afc0-35960f4acff8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 894.903930] env[69994]: DEBUG nova.compute.manager [req-3506ab62-7e2f-438e-9c02-c32cd93b11f7 req-f2c2b696-686b-46d6-b490-457ea9d715d8 service nova] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Refreshing instance network info cache due to event network-changed-aabfeee4-193c-4644-afc0-35960f4acff8. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 894.904158] env[69994]: DEBUG oslo_concurrency.lockutils [req-3506ab62-7e2f-438e-9c02-c32cd93b11f7 req-f2c2b696-686b-46d6-b490-457ea9d715d8 service nova] Acquiring lock "refresh_cache-b80a405e-a02e-4b18-a325-753146533d1b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.904319] env[69994]: DEBUG oslo_concurrency.lockutils [req-3506ab62-7e2f-438e-9c02-c32cd93b11f7 req-f2c2b696-686b-46d6-b490-457ea9d715d8 service nova] Acquired lock "refresh_cache-b80a405e-a02e-4b18-a325-753146533d1b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.904478] env[69994]: DEBUG nova.network.neutron [req-3506ab62-7e2f-438e-9c02-c32cd93b11f7 req-f2c2b696-686b-46d6-b490-457ea9d715d8 service nova] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Refreshing network info cache for port aabfeee4-193c-4644-afc0-35960f4acff8 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 895.035667] env[69994]: DEBUG nova.compute.manager [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 895.089221] env[69994]: DEBUG nova.compute.manager [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 895.089491] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 895.090808] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb8c4f2-41ab-420a-b31b-af3e3956ceb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.102716] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 895.103020] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f059150-2fb9-46a5-bf63-873a6087723b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.110944] env[69994]: DEBUG oslo_vmware.api [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 895.110944] env[69994]: value = "task-2925621" [ 895.110944] env[69994]: _type = "Task" [ 895.110944] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.119135] env[69994]: DEBUG oslo_vmware.api [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925621, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.195056] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925620, 'name': CreateVM_Task, 'duration_secs': 0.375612} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.197552] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 895.199094] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.217614] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d22585-19ba-9ae9-cbc5-6f9f29b3cdf3, 'name': SearchDatastore_Task, 'duration_secs': 0.01305} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.217903] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.218144] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 895.218619] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.218619] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.218764] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.219227] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.219913] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 895.219913] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8cfc0d3-b1e2-496a-99ae-2b1240e5da41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.223194] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b0a6a1a-05ef-48c4-b75a-b74f562bd2f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.227514] env[69994]: 
DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 895.227514] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520ddb15-5021-66df-6159-60ba226dfccb" [ 895.227514] env[69994]: _type = "Task" [ 895.227514] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.232764] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.232997] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 895.238364] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fc783fa-ea6b-4588-a588-07024ae0c7e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.240487] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520ddb15-5021-66df-6159-60ba226dfccb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.243393] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 895.243393] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520cb24b-3f44-961e-2b06-bbdc486de841" [ 895.243393] env[69994]: _type = "Task" [ 895.243393] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.250501] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520cb24b-3f44-961e-2b06-bbdc486de841, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.381360] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba895728-a6a3-4e91-84d8-d1d7140c4731 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.388657] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e15718-ec09-4466-89a3-1118718dc4c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.420481] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c098b88a-1c50-4ba5-9719-c23da92ae7ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.427756] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20eab22-8c38-4d5b-83a1-e3275d6724b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.445983] env[69994]: DEBUG nova.compute.provider_tree [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.623030] env[69994]: DEBUG oslo_vmware.api [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925621, 'name': PowerOffVM_Task, 'duration_secs': 0.370492} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.623299] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 895.623471] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 895.623732] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b4bfb44-1d50-468d-9bc7-105a9e513608 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.663874] env[69994]: DEBUG nova.network.neutron [req-3506ab62-7e2f-438e-9c02-c32cd93b11f7 req-f2c2b696-686b-46d6-b490-457ea9d715d8 service nova] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Updated VIF entry in instance network info cache for port aabfeee4-193c-4644-afc0-35960f4acff8. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 895.664270] env[69994]: DEBUG nova.network.neutron [req-3506ab62-7e2f-438e-9c02-c32cd93b11f7 req-f2c2b696-686b-46d6-b490-457ea9d715d8 service nova] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Updating instance_info_cache with network_info: [{"id": "aabfeee4-193c-4644-afc0-35960f4acff8", "address": "fa:16:3e:02:33:10", "network": {"id": "0d173c7c-24a5-47c8-b6e4-716b3226c39b", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1739066242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a9a6d7e114941d5a384d9907b491335", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaabfeee4-19", "ovs_interfaceid": "aabfeee4-193c-4644-afc0-35960f4acff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.725843] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 895.726101] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 895.726294] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Deleting the datastore file [datastore2] f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.726549] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b462240-20b0-4d54-b6f9-6a0bff1140af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.737711] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520ddb15-5021-66df-6159-60ba226dfccb, 'name': SearchDatastore_Task, 'duration_secs': 0.011086} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.738846] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.739091] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 895.739303] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.739869] env[69994]: DEBUG oslo_vmware.api [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 895.739869] env[69994]: value = "task-2925623" [ 895.739869] env[69994]: _type = "Task" [ 895.739869] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.753854] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520cb24b-3f44-961e-2b06-bbdc486de841, 'name': SearchDatastore_Task, 'duration_secs': 0.010668} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.754877] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6b16a6d-f3f2-4c8a-9b5e-e6e665432b91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.759288] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 895.759288] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ed3375-afbe-28f1-f0d9-cc0a81df5566" [ 895.759288] env[69994]: _type = "Task" [ 895.759288] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.766373] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ed3375-afbe-28f1-f0d9-cc0a81df5566, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.949399] env[69994]: DEBUG nova.scheduler.client.report [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 896.049249] env[69994]: DEBUG nova.compute.manager [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 896.074880] env[69994]: DEBUG nova.virt.hardware [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='107f5ef1948a853046fc002148423912',container_format='bare',created_at=2025-03-11T12:34:30Z,direct_url=,disk_format='vmdk',id=136e080b-9934-48bc-87a1-4505254582a3,min_disk=1,min_ram=0,name='tempest-test-snap-1731613837',owner='21bf4c6f3b2c45218949b0e6c1eb84fd',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-03-11T12:34:44Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 896.076215] env[69994]: DEBUG nova.virt.hardware [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.076215] env[69994]: DEBUG nova.virt.hardware [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 896.076215] env[69994]: DEBUG nova.virt.hardware [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.076215] env[69994]: DEBUG nova.virt.hardware [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 896.076215] env[69994]: DEBUG nova.virt.hardware [None req-1a898ac8-5a33-43fc-a364-a59c8851546a 
tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 896.076215] env[69994]: DEBUG nova.virt.hardware [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 896.076215] env[69994]: DEBUG nova.virt.hardware [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 896.076474] env[69994]: DEBUG nova.virt.hardware [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 896.076523] env[69994]: DEBUG nova.virt.hardware [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 896.076739] env[69994]: DEBUG nova.virt.hardware [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 896.077694] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2257f493-a782-42d2-8e96-ba94b233104f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.088442] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65ade93-3a12-4e6c-8c15-24056f74aa51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.167142] env[69994]: DEBUG oslo_concurrency.lockutils [req-3506ab62-7e2f-438e-9c02-c32cd93b11f7 req-f2c2b696-686b-46d6-b490-457ea9d715d8 service nova] Releasing lock "refresh_cache-b80a405e-a02e-4b18-a325-753146533d1b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.250721] env[69994]: DEBUG oslo_vmware.api [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925623, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221091} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.251190] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 896.251475] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 896.251753] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 896.252044] env[69994]: INFO nova.compute.manager [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Took 1.16 seconds to destroy the instance on the hypervisor. [ 896.252391] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 896.252681] env[69994]: DEBUG nova.compute.manager [-] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 896.252878] env[69994]: DEBUG nova.network.neutron [-] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 896.277301] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ed3375-afbe-28f1-f0d9-cc0a81df5566, 'name': SearchDatastore_Task, 'duration_secs': 0.008952} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.277301] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.277731] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 565066c4-2f33-44c6-8e82-4c6d729cd0b7/565066c4-2f33-44c6-8e82-4c6d729cd0b7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 896.278169] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.278637] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 896.278961] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19564cea-16f6-4534-a356-47be15507dbd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.281367] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50f09cb3-073e-4b7a-8570-735acd0f6ebd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.291082] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 896.291082] env[69994]: value = "task-2925624" [ 896.291082] env[69994]: _type = "Task" [ 896.291082] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.296160] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 896.296480] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 896.297651] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dcfcebd-5571-4869-800e-3be9ee5b15f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.304359] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925624, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.310031] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 896.310031] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cb2a1e-9daa-66d2-1e20-edb87b4ac9f6" [ 896.310031] env[69994]: _type = "Task" [ 896.310031] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.315414] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cb2a1e-9daa-66d2-1e20-edb87b4ac9f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.434789] env[69994]: DEBUG nova.network.neutron [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Successfully updated port: 02c12be4-6c2c-415c-bbf4-af57f2c1bb6f {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.455091] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.455619] env[69994]: DEBUG nova.compute.manager [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 896.458203] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.548s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.458430] env[69994]: DEBUG nova.objects.instance [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lazy-loading 'resources' on Instance uuid 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 896.572418] env[69994]: DEBUG nova.compute.manager [req-71271ecc-6c53-4bd4-a1ea-da332f56b4c4 req-37ded6d9-f4a4-4b96-b068-d1b12c6f718a service nova] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Received event network-vif-deleted-885142d2-3a31-487c-b773-a0b0df2e4e40 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 896.572808] env[69994]: INFO nova.compute.manager [req-71271ecc-6c53-4bd4-a1ea-da332f56b4c4 req-37ded6d9-f4a4-4b96-b068-d1b12c6f718a service nova] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Neutron deleted interface 885142d2-3a31-487c-b773-a0b0df2e4e40; detaching it from the instance and deleting it from the info cache [ 896.572874] env[69994]: DEBUG nova.network.neutron [req-71271ecc-6c53-4bd4-a1ea-da332f56b4c4 req-37ded6d9-f4a4-4b96-b068-d1b12c6f718a service nova] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.800071] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925624, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.817764] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cb2a1e-9daa-66d2-1e20-edb87b4ac9f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009424} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.818589] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-880561b6-6858-4f59-9a74-c74ed791079b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.824339] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 896.824339] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527453c0-9b0e-a8f3-be53-d4d1f70bc2f5" [ 896.824339] env[69994]: _type = "Task" [ 896.824339] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.831924] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527453c0-9b0e-a8f3-be53-d4d1f70bc2f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.932528] env[69994]: DEBUG nova.compute.manager [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Received event network-vif-plugged-02c12be4-6c2c-415c-bbf4-af57f2c1bb6f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 896.932768] env[69994]: DEBUG oslo_concurrency.lockutils [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] Acquiring lock "309e5014-a43f-4346-9c11-036eb36c8c1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.932984] env[69994]: DEBUG oslo_concurrency.lockutils [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] Lock "309e5014-a43f-4346-9c11-036eb36c8c1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.933171] env[69994]: DEBUG oslo_concurrency.lockutils [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] Lock "309e5014-a43f-4346-9c11-036eb36c8c1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.933371] env[69994]: DEBUG nova.compute.manager [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] No waiting events found dispatching network-vif-plugged-02c12be4-6c2c-415c-bbf4-af57f2c1bb6f {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 896.933546] env[69994]: WARNING nova.compute.manager [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Received unexpected event network-vif-plugged-02c12be4-6c2c-415c-bbf4-af57f2c1bb6f for instance with vm_state building and task_state spawning. [ 896.933707] env[69994]: DEBUG nova.compute.manager [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Received event network-changed-02c12be4-6c2c-415c-bbf4-af57f2c1bb6f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 896.933859] env[69994]: DEBUG nova.compute.manager [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Refreshing instance network info cache due to event network-changed-02c12be4-6c2c-415c-bbf4-af57f2c1bb6f. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 896.934054] env[69994]: DEBUG oslo_concurrency.lockutils [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] Acquiring lock "refresh_cache-309e5014-a43f-4346-9c11-036eb36c8c1f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.934221] env[69994]: DEBUG oslo_concurrency.lockutils [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] Acquired lock "refresh_cache-309e5014-a43f-4346-9c11-036eb36c8c1f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.934395] env[69994]: DEBUG nova.network.neutron [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Refreshing network info cache for port 02c12be4-6c2c-415c-bbf4-af57f2c1bb6f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 896.935973] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "refresh_cache-309e5014-a43f-4346-9c11-036eb36c8c1f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.961760] env[69994]: DEBUG nova.compute.utils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 896.965798] env[69994]: DEBUG nova.compute.manager [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 896.965972] env[69994]: DEBUG nova.network.neutron [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 897.010478] env[69994]: DEBUG nova.policy [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1c5c9e7a35034135945c869bf512c23e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93248f8d8f3b435ea8cbe998a1b3aa50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 897.053235] env[69994]: DEBUG nova.network.neutron [-] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.076077] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e3d99f3-9b14-46d1-8526-c375f21ff403 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.087286] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3858430b-8485-4faf-9a04-351f7fd82872 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.121778] env[69994]: DEBUG nova.compute.manager [req-71271ecc-6c53-4bd4-a1ea-da332f56b4c4 req-37ded6d9-f4a4-4b96-b068-d1b12c6f718a service nova] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Detach interface failed, port_id=885142d2-3a31-487c-b773-a0b0df2e4e40, reason: Instance f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 897.301620] env[69994]: DEBUG nova.network.neutron [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Successfully created port: 86f18e5a-d4c8-412a-976f-3cef54cfe490 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 897.306807] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925624, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54935} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.307229] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 565066c4-2f33-44c6-8e82-4c6d729cd0b7/565066c4-2f33-44c6-8e82-4c6d729cd0b7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 897.307314] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 897.308020] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2ea3be9c-690d-4710-bd08-e81b0319bf69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.314471] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 897.314471] env[69994]: value = "task-2925625" [ 897.314471] env[69994]: _type = "Task" [ 897.314471] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.325199] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925625, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.330815] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ddaa853-aefe-4bd2-9ac5-04a9ef48c5db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.339032] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527453c0-9b0e-a8f3-be53-d4d1f70bc2f5, 'name': SearchDatastore_Task, 'duration_secs': 0.015501} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.340909] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.341275] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] b80a405e-a02e-4b18-a325-753146533d1b/b80a405e-a02e-4b18-a325-753146533d1b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 897.341634] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b16522d-4711-4076-980a-fb4a3fddeea6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.344473] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762c14c3-aa2b-4c71-b8b4-ba961a232ea7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.351329] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 897.351329] env[69994]: value = "task-2925626" [ 897.351329] env[69994]: _type = "Task" [ 897.351329] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.380430] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f73e3d-fde7-4f95-99d0-ba0bf02da148 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.387691] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925626, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.390888] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b14ec16-41c9-46c9-b820-cf6f592d87d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.404745] env[69994]: DEBUG nova.compute.provider_tree [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 897.466465] env[69994]: DEBUG nova.compute.manager [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 897.486034] env[69994]: DEBUG nova.network.neutron [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.556055] env[69994]: INFO nova.compute.manager [-] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Took 1.30 seconds to deallocate network for instance. [ 897.569118] env[69994]: DEBUG nova.network.neutron [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.826296] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925625, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075559} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.826538] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 897.827314] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b287e6d8-3c8c-4558-a70f-93b7f657adb6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.849114] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 565066c4-2f33-44c6-8e82-4c6d729cd0b7/565066c4-2f33-44c6-8e82-4c6d729cd0b7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.849362] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4b7555f-4c8e-432e-93e8-18df453c513b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.869282] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 897.869282] env[69994]: value = "task-2925627" [ 897.869282] env[69994]: _type = "Task" [ 897.869282] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.877204] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925627, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.884507] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925626, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.440368} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.884810] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] b80a405e-a02e-4b18-a325-753146533d1b/b80a405e-a02e-4b18-a325-753146533d1b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 897.885142] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 897.885430] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e7e7141c-b4fc-44e5-bb48-9147de8899b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.891731] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 897.891731] env[69994]: value = "task-2925628" [ 897.891731] env[69994]: _type = "Task" [ 897.891731] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.899510] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925628, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.938122] env[69994]: DEBUG nova.scheduler.client.report [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 88 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 897.938523] env[69994]: DEBUG nova.compute.provider_tree [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 88 to 89 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 897.938832] env[69994]: DEBUG nova.compute.provider_tree [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 898.063297] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.072253] env[69994]: DEBUG oslo_concurrency.lockutils [req-af04deea-4375-4b6f-8a1f-03b4230cd092 req-d5105e5d-4fe6-418a-a046-4f2be82fb563 service nova] Releasing lock "refresh_cache-309e5014-a43f-4346-9c11-036eb36c8c1f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.072605] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "refresh_cache-309e5014-a43f-4346-9c11-036eb36c8c1f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.072759] env[69994]: DEBUG nova.network.neutron [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.380233] env[69994]: DEBUG 
oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925627, 'name': ReconfigVM_Task, 'duration_secs': 0.490447} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.380531] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 565066c4-2f33-44c6-8e82-4c6d729cd0b7/565066c4-2f33-44c6-8e82-4c6d729cd0b7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.381196] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6dbbdaca-350d-421c-81ad-d6880178c0a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.387567] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 898.387567] env[69994]: value = "task-2925629" [ 898.387567] env[69994]: _type = "Task" [ 898.387567] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.399026] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925629, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.403569] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925628, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063458} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.403818] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 898.404618] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011260cc-bd09-4f4b-8210-01592ba8a7ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.427420] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] b80a405e-a02e-4b18-a325-753146533d1b/b80a405e-a02e-4b18-a325-753146533d1b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 898.427777] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adaf6b6b-e108-430b-81f5-40d7e353ea52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.443863] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.986s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.447273] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 25.552s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.449120] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 898.449120] env[69994]: value = "task-2925630" [ 898.449120] env[69994]: _type = "Task" [ 898.449120] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.458365] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925630, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.465340] env[69994]: INFO nova.scheduler.client.report [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Deleted allocations for instance 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f [ 898.474891] env[69994]: DEBUG nova.compute.manager [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 898.502753] env[69994]: DEBUG nova.virt.hardware [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 898.503362] env[69994]: DEBUG nova.virt.hardware [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.503362] env[69994]: DEBUG nova.virt.hardware [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 898.503700] env[69994]: DEBUG nova.virt.hardware [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.503700] env[69994]: DEBUG nova.virt.hardware [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 898.503700] env[69994]: DEBUG nova.virt.hardware [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 898.503952] env[69994]: DEBUG nova.virt.hardware [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 898.504083] env[69994]: DEBUG nova.virt.hardware [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 898.504855] env[69994]: DEBUG nova.virt.hardware [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 898.504855] env[69994]: DEBUG nova.virt.hardware [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 898.504855] env[69994]: DEBUG nova.virt.hardware [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 898.505759] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de96ce8-fb06-48a9-b07d-72adb9b73c8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.515648] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6804882-29e0-43f1-b982-b04e569f4424 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.606024] env[69994]: DEBUG nova.network.neutron [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.742733] env[69994]: DEBUG nova.network.neutron [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Updating instance_info_cache with network_info: [{"id": "02c12be4-6c2c-415c-bbf4-af57f2c1bb6f", "address": "fa:16:3e:d6:26:1d", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02c12be4-6c", "ovs_interfaceid": "02c12be4-6c2c-415c-bbf4-af57f2c1bb6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.897879] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925629, 'name': Rename_Task, 'duration_secs': 0.230344} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.898172] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 898.898417] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12c93a19-92da-4af0-a973-f551b50f9a74 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.904897] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 898.904897] env[69994]: value = "task-2925631" [ 898.904897] env[69994]: _type = "Task" [ 898.904897] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.914023] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925631, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.981415] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925630, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.982901] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90fb2feb-2886-4a83-ab9c-1b7f4424a4ee tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "9ce0d8da-2366-469a-82cf-f2dcd4c7e44f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.869s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.989668] env[69994]: DEBUG nova.compute.manager [req-32f12844-a37d-4240-b606-6ed22f26bcaa req-9f02a5fd-dabf-4b41-943e-ae2a88f2b534 service nova] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Received event network-vif-plugged-86f18e5a-d4c8-412a-976f-3cef54cfe490 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 898.989961] env[69994]: DEBUG oslo_concurrency.lockutils [req-32f12844-a37d-4240-b606-6ed22f26bcaa req-9f02a5fd-dabf-4b41-943e-ae2a88f2b534 service nova] Acquiring lock "767ecd3d-631d-43b5-8ebf-28b6cb2077e9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.990288] env[69994]: DEBUG oslo_concurrency.lockutils [req-32f12844-a37d-4240-b606-6ed22f26bcaa req-9f02a5fd-dabf-4b41-943e-ae2a88f2b534 service nova] Lock "767ecd3d-631d-43b5-8ebf-28b6cb2077e9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.990514] env[69994]: DEBUG oslo_concurrency.lockutils [req-32f12844-a37d-4240-b606-6ed22f26bcaa req-9f02a5fd-dabf-4b41-943e-ae2a88f2b534 service nova] Lock "767ecd3d-631d-43b5-8ebf-28b6cb2077e9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.990753] env[69994]: DEBUG nova.compute.manager [req-32f12844-a37d-4240-b606-6ed22f26bcaa req-9f02a5fd-dabf-4b41-943e-ae2a88f2b534 service nova] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] No waiting events found dispatching network-vif-plugged-86f18e5a-d4c8-412a-976f-3cef54cfe490 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 898.990968] env[69994]: WARNING nova.compute.manager [req-32f12844-a37d-4240-b606-6ed22f26bcaa req-9f02a5fd-dabf-4b41-943e-ae2a88f2b534 service nova] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Received unexpected event network-vif-plugged-86f18e5a-d4c8-412a-976f-3cef54cfe490 for instance with vm_state building and task_state spawning. 
[ 899.098438] env[69994]: DEBUG nova.network.neutron [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Successfully updated port: 86f18e5a-d4c8-412a-976f-3cef54cfe490 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 899.245849] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "refresh_cache-309e5014-a43f-4346-9c11-036eb36c8c1f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.246245] env[69994]: DEBUG nova.compute.manager [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Instance network_info: |[{"id": "02c12be4-6c2c-415c-bbf4-af57f2c1bb6f", "address": "fa:16:3e:d6:26:1d", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02c12be4-6c", "ovs_interfaceid": "02c12be4-6c2c-415c-bbf4-af57f2c1bb6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 899.246707] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:26:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04e15990-16e1-4cb2-b0f0-06c362e68c5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02c12be4-6c2c-415c-bbf4-af57f2c1bb6f', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 899.254222] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 899.254777] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 899.255021] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fea07340-806f-49fe-96a2-3cf597d91308 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.275262] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 899.275262] env[69994]: value = "task-2925633" [ 899.275262] env[69994]: _type = "Task" [ 899.275262] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.283217] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925633, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.414837] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925631, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.460520] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925630, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.497236] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 298a4d59-733f-4cda-a9c2-80dc21be91ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.497475] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 627f89ad-0381-4de9-a429-c74e26975ce9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.497590] env[69994]: WARNING nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 9e9973e1-feb8-4fd7-95ae-e6d824af5a64 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 899.497716] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 63d6a59a-d58c-4179-ad39-eb9863e6f84c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.497848] env[69994]: WARNING nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 899.497966] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance b00d09ea-5eee-47ed-adcb-288cdd362e89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.498113] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance ab320e59-febb-4f8f-9bc4-74227d29c752 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.498249] env[69994]: WARNING nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 9b6aca3c-337b-4067-80e0-487d956fabc7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 899.498360] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance f1f0d79f-dc67-4cf9-816c-c451f20d65ca actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.498471] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance c06a2540-e77d-48c0-967f-94e2a53c4d8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.498587] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.498692] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 234c2683-80f3-4f29-bcc9-9853338128bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.498800] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance b4c6b628-426e-4efc-b8b6-0c2937ef6df3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.498919] env[69994]: WARNING nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 9d146d57-9948-4b18-a3f3-675b53d137ed is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 899.499045] env[69994]: WARNING nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 899.499209] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 5acdf02b-f61c-46ff-9c36-8e86b9be7738 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.499346] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 06fa5ab5-baab-466e-8574-5391247c13a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.499461] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance d5af7ae1-d68e-4170-b762-e56d7f2551d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.499571] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 565066c4-2f33-44c6-8e82-4c6d729cd0b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.499686] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance b80a405e-a02e-4b18-a325-753146533d1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.499789] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 309e5014-a43f-4346-9c11-036eb36c8c1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.499918] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 767ecd3d-631d-43b5-8ebf-28b6cb2077e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 899.601836] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Acquiring lock "refresh_cache-767ecd3d-631d-43b5-8ebf-28b6cb2077e9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.601836] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Acquired lock "refresh_cache-767ecd3d-631d-43b5-8ebf-28b6cb2077e9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.602076] env[69994]: DEBUG nova.network.neutron [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 899.786068] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925633, 'name': CreateVM_Task, 'duration_secs': 0.429891} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.786068] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.786068] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/136e080b-9934-48bc-87a1-4505254582a3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.786330] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "[datastore2] devstack-image-cache_base/136e080b-9934-48bc-87a1-4505254582a3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.786599] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/136e080b-9934-48bc-87a1-4505254582a3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 899.786855] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b8e127e-16d8-4c40-9620-dbede251a43f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.791500] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 899.791500] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528126a0-bf2e-f3d6-3376-f909a589bce5" [ 899.791500] env[69994]: _type = "Task" [ 899.791500] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.804505] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528126a0-bf2e-f3d6-3376-f909a589bce5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.914805] env[69994]: DEBUG oslo_vmware.api [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925631, 'name': PowerOnVM_Task, 'duration_secs': 0.620212} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.915176] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 899.915422] env[69994]: INFO nova.compute.manager [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Took 9.49 seconds to spawn the instance on the hypervisor. [ 899.915667] env[69994]: DEBUG nova.compute.manager [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 899.916469] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40bb2529-70e3-4a9e-a0c2-78b02b50c1a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.959778] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925630, 'name': ReconfigVM_Task, 'duration_secs': 1.28487} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.959778] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Reconfigured VM instance instance-0000003c to attach disk [datastore2] b80a405e-a02e-4b18-a325-753146533d1b/b80a405e-a02e-4b18-a325-753146533d1b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 899.960494] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41f239d8-c710-4d1a-a45f-97ae2e5be5ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.967463] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 899.967463] env[69994]: value = "task-2925634" [ 899.967463] env[69994]: _type = "Task" [ 899.967463] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.975285] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925634, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.002978] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance f0b77732-aae1-4790-a2c7-75586e78eda6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 900.134962] env[69994]: DEBUG nova.network.neutron [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 900.280000] env[69994]: DEBUG nova.network.neutron [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Updating instance_info_cache with network_info: [{"id": "86f18e5a-d4c8-412a-976f-3cef54cfe490", "address": "fa:16:3e:c0:fb:9a", "network": {"id": "f3754c34-dc25-4495-9d18-b1c78bacbdeb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1404815549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93248f8d8f3b435ea8cbe998a1b3aa50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "233536d0-6913-4879-8442-42dcf1d4ecbb", "external-id": "nsx-vlan-transportzone-700", "segmentation_id": 700, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86f18e5a-d4", "ovs_interfaceid": "86f18e5a-d4c8-412a-976f-3cef54cfe490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.301936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "[datastore2] devstack-image-cache_base/136e080b-9934-48bc-87a1-4505254582a3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.302204] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Processing image 136e080b-9934-48bc-87a1-4505254582a3 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 900.302446] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 
tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/136e080b-9934-48bc-87a1-4505254582a3/136e080b-9934-48bc-87a1-4505254582a3.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.302589] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "[datastore2] devstack-image-cache_base/136e080b-9934-48bc-87a1-4505254582a3/136e080b-9934-48bc-87a1-4505254582a3.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.302767] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 900.303289] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0fda65f-0634-49d4-95f4-9838241be99e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.320188] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 900.320348] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 900.321050] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29316871-16dd-494a-aba1-14cd5f901911 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.327905] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 900.327905] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bc24f8-182d-2eab-f12a-ba06368b1635" [ 900.327905] env[69994]: _type = "Task" [ 900.327905] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.335687] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bc24f8-182d-2eab-f12a-ba06368b1635, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.443341] env[69994]: INFO nova.compute.manager [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Took 40.34 seconds to build instance. 
[ 900.479198] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925634, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.506030] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 43119e21-5226-482c-b640-33e73051a563 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 900.782398] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Releasing lock "refresh_cache-767ecd3d-631d-43b5-8ebf-28b6cb2077e9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.782779] env[69994]: DEBUG nova.compute.manager [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Instance network_info: |[{"id": "86f18e5a-d4c8-412a-976f-3cef54cfe490", "address": "fa:16:3e:c0:fb:9a", "network": {"id": "f3754c34-dc25-4495-9d18-b1c78bacbdeb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1404815549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93248f8d8f3b435ea8cbe998a1b3aa50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "233536d0-6913-4879-8442-42dcf1d4ecbb", "external-id": "nsx-vlan-transportzone-700", "segmentation_id": 700, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86f18e5a-d4", "ovs_interfaceid": "86f18e5a-d4c8-412a-976f-3cef54cfe490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 900.783314] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:fb:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '233536d0-6913-4879-8442-42dcf1d4ecbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '86f18e5a-d4c8-412a-976f-3cef54cfe490', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 900.790909] 
env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Creating folder: Project (93248f8d8f3b435ea8cbe998a1b3aa50). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 900.791688] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0785cdc9-f107-47cf-8e0f-cde594935782 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.801798] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Created folder: Project (93248f8d8f3b435ea8cbe998a1b3aa50) in parent group-v587342. [ 900.801987] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Creating folder: Instances. Parent ref: group-v587512. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 900.802241] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-16ba4405-af2b-43eb-aa4e-2afdaefcbba7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.809989] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Created folder: Instances in parent group-v587512. [ 900.810249] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 900.810452] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 900.810657] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21c772f6-1c2c-4115-93ec-b4b274b8b8ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.829637] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 900.829637] env[69994]: value = "task-2925637" [ 900.829637] env[69994]: _type = "Task" [ 900.829637] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.843417] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 900.843690] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Fetch image to [datastore2] OSTACK_IMG_e18a94e6-923f-4c4c-9bfb-452fa78d1514/OSTACK_IMG_e18a94e6-923f-4c4c-9bfb-452fa78d1514.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 900.843894] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Downloading stream optimized image 136e080b-9934-48bc-87a1-4505254582a3 to [datastore2] OSTACK_IMG_e18a94e6-923f-4c4c-9bfb-452fa78d1514/OSTACK_IMG_e18a94e6-923f-4c4c-9bfb-452fa78d1514.vmdk on the data store datastore2 as vApp {{(pid=69994) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 900.844080] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Downloading image file data 136e080b-9934-48bc-87a1-4505254582a3 to the ESX as VM named 'OSTACK_IMG_e18a94e6-923f-4c4c-9bfb-452fa78d1514' {{(pid=69994) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 900.845925] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925637, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.921491] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 900.921491] env[69994]: value = "resgroup-9" [ 900.921491] env[69994]: _type = "ResourcePool" [ 900.921491] env[69994]: }. 
{{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 900.921842] env[69994]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-06813856-61f0-4473-b3bc-ec67b8135560 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.942557] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cd4934d-2deb-4b8e-b860-42eb44edfd7f tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "565066c4-2f33-44c6-8e82-4c6d729cd0b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.848s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.944170] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lease: (returnval){ [ 900.944170] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5264dba1-9681-3f7b-4d33-38d6e117ad53" [ 900.944170] env[69994]: _type = "HttpNfcLease" [ 900.944170] env[69994]: } obtained for vApp import into resource pool (val){ [ 900.944170] env[69994]: value = "resgroup-9" [ 900.944170] env[69994]: _type = "ResourcePool" [ 900.944170] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 900.944432] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the lease: (returnval){ [ 900.944432] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5264dba1-9681-3f7b-4d33-38d6e117ad53" [ 900.944432] env[69994]: _type = "HttpNfcLease" [ 900.944432] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 900.950720] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 900.950720] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5264dba1-9681-3f7b-4d33-38d6e117ad53" [ 900.950720] env[69994]: _type = "HttpNfcLease" [ 900.950720] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 900.977777] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925634, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.008863] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance d28a6129-1bfe-40da-bc91-c68cf874aa36 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 901.019542] env[69994]: DEBUG nova.compute.manager [req-f400482f-ba1c-40cc-8f58-d15b01a0cf45 req-298320e3-951f-4015-9a83-08e00064e4af service nova] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Received event network-changed-86f18e5a-d4c8-412a-976f-3cef54cfe490 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 901.019751] env[69994]: DEBUG nova.compute.manager [req-f400482f-ba1c-40cc-8f58-d15b01a0cf45 req-298320e3-951f-4015-9a83-08e00064e4af service nova] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Refreshing instance network info cache due to event network-changed-86f18e5a-d4c8-412a-976f-3cef54cfe490. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 901.019971] env[69994]: DEBUG oslo_concurrency.lockutils [req-f400482f-ba1c-40cc-8f58-d15b01a0cf45 req-298320e3-951f-4015-9a83-08e00064e4af service nova] Acquiring lock "refresh_cache-767ecd3d-631d-43b5-8ebf-28b6cb2077e9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.020142] env[69994]: DEBUG oslo_concurrency.lockutils [req-f400482f-ba1c-40cc-8f58-d15b01a0cf45 req-298320e3-951f-4015-9a83-08e00064e4af service nova] Acquired lock "refresh_cache-767ecd3d-631d-43b5-8ebf-28b6cb2077e9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.020284] env[69994]: DEBUG nova.network.neutron [req-f400482f-ba1c-40cc-8f58-d15b01a0cf45 req-298320e3-951f-4015-9a83-08e00064e4af service nova] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Refreshing network info cache for port 86f18e5a-d4c8-412a-976f-3cef54cfe490 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.163545] env[69994]: DEBUG nova.compute.manager [req-bdf3d858-9c29-4934-ba4f-2edbae71563f req-095aed14-21b7-4c4a-b4e5-40ae2fc4f167 service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Received event network-changed-2c8ad636-498e-4d08-8915-5d11ff684a84 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 901.163809] env[69994]: DEBUG nova.compute.manager [req-bdf3d858-9c29-4934-ba4f-2edbae71563f req-095aed14-21b7-4c4a-b4e5-40ae2fc4f167 service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Refreshing instance network info cache due to event network-changed-2c8ad636-498e-4d08-8915-5d11ff684a84. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 901.164098] env[69994]: DEBUG oslo_concurrency.lockutils [req-bdf3d858-9c29-4934-ba4f-2edbae71563f req-095aed14-21b7-4c4a-b4e5-40ae2fc4f167 service nova] Acquiring lock "refresh_cache-565066c4-2f33-44c6-8e82-4c6d729cd0b7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.164300] env[69994]: DEBUG oslo_concurrency.lockutils [req-bdf3d858-9c29-4934-ba4f-2edbae71563f req-095aed14-21b7-4c4a-b4e5-40ae2fc4f167 service nova] Acquired lock "refresh_cache-565066c4-2f33-44c6-8e82-4c6d729cd0b7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.164507] env[69994]: DEBUG nova.network.neutron [req-bdf3d858-9c29-4934-ba4f-2edbae71563f req-095aed14-21b7-4c4a-b4e5-40ae2fc4f167 service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Refreshing network info cache for port 2c8ad636-498e-4d08-8915-5d11ff684a84 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.342725] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925637, 'name': CreateVM_Task, 'duration_secs': 0.495359} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.342929] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 901.343658] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.343851] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.344650] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 901.344650] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cace3e19-a475-405b-874e-5a50a913a8eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.349404] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Waiting for the task: (returnval){ [ 901.349404] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226a2d2-ca23-8997-7075-63f61c121eae" [ 901.349404] env[69994]: _type = "Task" [ 
901.349404] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.357340] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226a2d2-ca23-8997-7075-63f61c121eae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.452366] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 901.452366] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5264dba1-9681-3f7b-4d33-38d6e117ad53" [ 901.452366] env[69994]: _type = "HttpNfcLease" [ 901.452366] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 901.477881] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925634, 'name': Rename_Task, 'duration_secs': 1.155865} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.479594] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 901.479594] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-20881246-5f78-458a-8f41-ea7e97e12f67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.486593] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 901.486593] env[69994]: value = "task-2925639" [ 901.486593] env[69994]: _type = "Task" [ 901.486593] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.496484] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925639, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.512413] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 8001cb13-6a52-451b-b4b6-57b893975079 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 901.512785] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 901.512910] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 901.697342] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.697587] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.697819] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "b00d09ea-5eee-47ed-adcb-288cdd362e89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.697994] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.698233] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.700460] env[69994]: INFO nova.compute.manager [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Terminating instance [ 901.857230] env[69994]: DEBUG nova.network.neutron [req-f400482f-ba1c-40cc-8f58-d15b01a0cf45 req-298320e3-951f-4015-9a83-08e00064e4af service nova] [instance: 
767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Updated VIF entry in instance network info cache for port 86f18e5a-d4c8-412a-976f-3cef54cfe490. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 901.858040] env[69994]: DEBUG nova.network.neutron [req-f400482f-ba1c-40cc-8f58-d15b01a0cf45 req-298320e3-951f-4015-9a83-08e00064e4af service nova] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Updating instance_info_cache with network_info: [{"id": "86f18e5a-d4c8-412a-976f-3cef54cfe490", "address": "fa:16:3e:c0:fb:9a", "network": {"id": "f3754c34-dc25-4495-9d18-b1c78bacbdeb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1404815549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93248f8d8f3b435ea8cbe998a1b3aa50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "233536d0-6913-4879-8442-42dcf1d4ecbb", "external-id": "nsx-vlan-transportzone-700", "segmentation_id": 700, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86f18e5a-d4", "ovs_interfaceid": "86f18e5a-d4c8-412a-976f-3cef54cfe490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.876970] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226a2d2-ca23-8997-7075-63f61c121eae, 'name': SearchDatastore_Task, 'duration_secs': 0.017441} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.880822] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.881230] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 901.881624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.881896] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.882227] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 901.883265] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-977e0dfa-5319-4dc7-acad-6fd48de4d15b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.894477] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 901.894661] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 901.898259] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef80c31d-00ec-4762-9313-425c0609140c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.906058] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Waiting for the task: (returnval){ [ 901.906058] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b0e943-24dd-ced6-9846-7533faeb2ad9" [ 901.906058] env[69994]: _type = "Task" [ 901.906058] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.913058] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b0e943-24dd-ced6-9846-7533faeb2ad9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.936832] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1ee6a0-f7c5-4128-92ec-a575a1c94471 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.945166] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943b7040-6603-4e72-9372-a4ab893b1860 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.952175] env[69994]: DEBUG nova.network.neutron [req-bdf3d858-9c29-4934-ba4f-2edbae71563f req-095aed14-21b7-4c4a-b4e5-40ae2fc4f167 service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Updated VIF entry in instance network info cache for port 2c8ad636-498e-4d08-8915-5d11ff684a84. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 901.952519] env[69994]: DEBUG nova.network.neutron [req-bdf3d858-9c29-4934-ba4f-2edbae71563f req-095aed14-21b7-4c4a-b4e5-40ae2fc4f167 service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Updating instance_info_cache with network_info: [{"id": "2c8ad636-498e-4d08-8915-5d11ff684a84", "address": "fa:16:3e:95:ea:01", "network": {"id": "e62ff410-9189-4ff0-98cd-b044bdf4b4b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-758708818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "891cfe67dd0044f3920402752215e361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c8ad636-49", "ovs_interfaceid": "2c8ad636-498e-4d08-8915-5d11ff684a84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.982225] env[69994]: DEBUG oslo_concurrency.lockutils [req-bdf3d858-9c29-4934-ba4f-2edbae71563f req-095aed14-21b7-4c4a-b4e5-40ae2fc4f167 service nova] Releasing lock "refresh_cache-565066c4-2f33-44c6-8e82-4c6d729cd0b7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.982280] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca615cf3-0b0f-4fbb-900f-9c7686fa9041 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.989017] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 901.989017] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5264dba1-9681-3f7b-4d33-38d6e117ad53" [ 901.989017] env[69994]: _type = "HttpNfcLease" [ 901.989017] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 901.989017] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 901.989017] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5264dba1-9681-3f7b-4d33-38d6e117ad53" [ 901.989017] env[69994]: _type = "HttpNfcLease" [ 901.989017] env[69994]: }. 
{{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 901.990696] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a4dcd8-b19a-48c7-acdf-5f0c3916e0a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.996771] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b011f4-493f-4917-962c-6caf59eaba7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.006246] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52806465-5baa-9735-4c6d-d2bd99053cdd/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 902.006426] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52806465-5baa-9735-4c6d-d2bd99053cdd/disk-0.vmdk. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 902.007767] env[69994]: DEBUG oslo_vmware.api [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925639, 'name': PowerOnVM_Task, 'duration_secs': 0.461039} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.008344] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 902.008566] env[69994]: INFO nova.compute.manager [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Took 9.59 seconds to spawn the instance on the hypervisor. 
[ 902.008750] env[69994]: DEBUG nova.compute.manager [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 902.010245] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152344dc-1701-4d29-9b90-fd6dbc0ae695 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.074892] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.084023] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-508d6f8f-3e47-4fe1-9a34-80f883e4635a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.204403] env[69994]: DEBUG nova.compute.manager [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 902.204911] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 902.205557] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a374ff-c89e-4448-9a75-637e2f71a264 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.213374] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 902.213625] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60ac1bc0-71ae-4653-8e75-e53922000f5c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.219689] env[69994]: DEBUG oslo_vmware.api [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 902.219689] env[69994]: value = "task-2925640" [ 902.219689] env[69994]: _type = "Task" [ 902.219689] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.227066] env[69994]: DEBUG oslo_vmware.api [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925640, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.366566] env[69994]: DEBUG oslo_concurrency.lockutils [req-f400482f-ba1c-40cc-8f58-d15b01a0cf45 req-298320e3-951f-4015-9a83-08e00064e4af service nova] Releasing lock "refresh_cache-767ecd3d-631d-43b5-8ebf-28b6cb2077e9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 902.420010] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b0e943-24dd-ced6-9846-7533faeb2ad9, 'name': SearchDatastore_Task, 'duration_secs': 0.025149} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.421540] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d30de09e-ddf1-4c47-91aa-0d3c409f42e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.427117] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Waiting for the task: (returnval){ [ 902.427117] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a3304-af8d-05fe-31de-ce53c1a53c6f" [ 902.427117] env[69994]: _type = "Task" [ 902.427117] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.437662] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a3304-af8d-05fe-31de-ce53c1a53c6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.579222] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 902.597938] env[69994]: INFO nova.compute.manager [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Took 40.98 seconds to build instance. 
[ 902.731588] env[69994]: DEBUG oslo_vmware.api [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925640, 'name': PowerOffVM_Task, 'duration_secs': 0.197881} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.731957] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 902.732150] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 902.732418] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4acab140-126f-49a2-ae83-31e538b59b86 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.795258] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 902.795644] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 902.796295] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Deleting the datastore file [datastore1] b00d09ea-5eee-47ed-adcb-288cdd362e89 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 902.796295] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8068d39e-4287-4681-9747-6b05ee921846 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.803310] env[69994]: DEBUG oslo_vmware.api [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for the task: (returnval){ [ 902.803310] env[69994]: value = "task-2925642" [ 902.803310] env[69994]: _type = "Task" [ 902.803310] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.815149] env[69994]: DEBUG oslo_vmware.api [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925642, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.858154] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Completed reading data from the image iterator. {{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 902.858154] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52806465-5baa-9735-4c6d-d2bd99053cdd/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 902.859030] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed22d210-8da3-4509-bf25-050fffa9723b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.864569] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52806465-5baa-9735-4c6d-d2bd99053cdd/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 902.864738] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52806465-5baa-9735-4c6d-d2bd99053cdd/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 902.864963] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-81ad2add-f512-4bf4-9711-3fc1f9ef6794 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.941880] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a3304-af8d-05fe-31de-ce53c1a53c6f, 'name': SearchDatastore_Task, 'duration_secs': 0.010249} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.942231] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 902.942535] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 767ecd3d-631d-43b5-8ebf-28b6cb2077e9/767ecd3d-631d-43b5-8ebf-28b6cb2077e9.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 902.942858] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24c4fed4-fdf0-4a2c-a8b8-5ec5c494b15b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.949451] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Waiting for the task: (returnval){ [ 902.949451] env[69994]: value = "task-2925643" [ 902.949451] env[69994]: _type = "Task" [ 902.949451] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.957768] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925643, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.057019] env[69994]: DEBUG nova.compute.manager [req-d3015ef8-c58a-4af2-94e2-d08c5c9a6f8c req-7ee418d7-f5da-445a-a654-0c9e93ab5c1d service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Received event network-changed-2c8ad636-498e-4d08-8915-5d11ff684a84 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 903.057275] env[69994]: DEBUG nova.compute.manager [req-d3015ef8-c58a-4af2-94e2-d08c5c9a6f8c req-7ee418d7-f5da-445a-a654-0c9e93ab5c1d service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Refreshing instance network info cache due to event network-changed-2c8ad636-498e-4d08-8915-5d11ff684a84. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 903.057495] env[69994]: DEBUG oslo_concurrency.lockutils [req-d3015ef8-c58a-4af2-94e2-d08c5c9a6f8c req-7ee418d7-f5da-445a-a654-0c9e93ab5c1d service nova] Acquiring lock "refresh_cache-565066c4-2f33-44c6-8e82-4c6d729cd0b7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.057670] env[69994]: DEBUG oslo_concurrency.lockutils [req-d3015ef8-c58a-4af2-94e2-d08c5c9a6f8c req-7ee418d7-f5da-445a-a654-0c9e93ab5c1d service nova] Acquired lock "refresh_cache-565066c4-2f33-44c6-8e82-4c6d729cd0b7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.057846] env[69994]: DEBUG nova.network.neutron [req-d3015ef8-c58a-4af2-94e2-d08c5c9a6f8c req-7ee418d7-f5da-445a-a654-0c9e93ab5c1d service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Refreshing network info cache for port 2c8ad636-498e-4d08-8915-5d11ff684a84 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 903.084271] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 903.084470] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.637s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.084731] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.880s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.086292] env[69994]: INFO nova.compute.claims [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 903.089134] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 903.089312] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Cleaning up deleted instances {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 903.100023] env[69994]: DEBUG oslo_concurrency.lockutils [None req-794edc06-2b55-4729-9718-1bb1f3bfb331 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "b80a405e-a02e-4b18-a325-753146533d1b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.497s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.143807] env[69994]: 
DEBUG oslo_vmware.rw_handles [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52806465-5baa-9735-4c6d-d2bd99053cdd/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 903.144055] env[69994]: INFO nova.virt.vmwareapi.images [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Downloaded image file data 136e080b-9934-48bc-87a1-4505254582a3 [ 903.144885] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32b8283-bb41-4b29-8fa9-acfe4db195be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.166799] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26e9c766-d1d1-44fb-a9be-0afb72cdb911 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.223354] env[69994]: INFO nova.virt.vmwareapi.images [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] The imported VM was unregistered [ 903.226550] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 903.226794] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Creating directory with path [datastore2] devstack-image-cache_base/136e080b-9934-48bc-87a1-4505254582a3 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 903.227084] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-807521e5-9c14-4b21-9037-dd2086895583 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.255843] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Created directory with path [datastore2] devstack-image-cache_base/136e080b-9934-48bc-87a1-4505254582a3 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 903.256115] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_e18a94e6-923f-4c4c-9bfb-452fa78d1514/OSTACK_IMG_e18a94e6-923f-4c4c-9bfb-452fa78d1514.vmdk to [datastore2] devstack-image-cache_base/136e080b-9934-48bc-87a1-4505254582a3/136e080b-9934-48bc-87a1-4505254582a3.vmdk. 
{{(pid=69994) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 903.256431] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-0c96faff-d455-432f-b356-c13c84b890a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.264692] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 903.264692] env[69994]: value = "task-2925645" [ 903.264692] env[69994]: _type = "Task" [ 903.264692] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.272699] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925645, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.314819] env[69994]: DEBUG oslo_vmware.api [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Task: {'id': task-2925642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.250789} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.315663] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 903.315663] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 903.315663] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 903.315865] env[69994]: INFO nova.compute.manager [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Took 1.11 seconds to destroy the instance on the hypervisor. [ 903.316110] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 903.316325] env[69994]: DEBUG nova.compute.manager [-] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 903.316427] env[69994]: DEBUG nova.network.neutron [-] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 903.459512] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925643, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.533266] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "b80a405e-a02e-4b18-a325-753146533d1b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.533552] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "b80a405e-a02e-4b18-a325-753146533d1b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.533770] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "b80a405e-a02e-4b18-a325-753146533d1b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.533959] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "b80a405e-a02e-4b18-a325-753146533d1b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.534144] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "b80a405e-a02e-4b18-a325-753146533d1b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.536714] env[69994]: INFO nova.compute.manager [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] 
[instance: b80a405e-a02e-4b18-a325-753146533d1b] Terminating instance [ 903.601776] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] There are 40 instances to clean {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 903.602178] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 9ce0d8da-2366-469a-82cf-f2dcd4c7e44f] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 903.774909] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925645, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.837684] env[69994]: DEBUG nova.network.neutron [req-d3015ef8-c58a-4af2-94e2-d08c5c9a6f8c req-7ee418d7-f5da-445a-a654-0c9e93ab5c1d service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Updated VIF entry in instance network info cache for port 2c8ad636-498e-4d08-8915-5d11ff684a84. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 903.841197] env[69994]: DEBUG nova.network.neutron [req-d3015ef8-c58a-4af2-94e2-d08c5c9a6f8c req-7ee418d7-f5da-445a-a654-0c9e93ab5c1d service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Updating instance_info_cache with network_info: [{"id": "2c8ad636-498e-4d08-8915-5d11ff684a84", "address": "fa:16:3e:95:ea:01", "network": {"id": "e62ff410-9189-4ff0-98cd-b044bdf4b4b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-758708818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "891cfe67dd0044f3920402752215e361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c8ad636-49", "ovs_interfaceid": "2c8ad636-498e-4d08-8915-5d11ff684a84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.960856] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925643, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.731238} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.961308] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 767ecd3d-631d-43b5-8ebf-28b6cb2077e9/767ecd3d-631d-43b5-8ebf-28b6cb2077e9.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 903.961621] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 903.961985] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5901579b-7309-4a9d-8a46-d3425a0d907c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.969652] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Waiting for the task: (returnval){ [ 903.969652] env[69994]: value = "task-2925646" [ 903.969652] env[69994]: _type = "Task" [ 903.969652] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.978178] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925646, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.042112] env[69994]: DEBUG nova.compute.manager [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 904.042431] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 904.043460] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b046b7b9-ef73-4fed-8b05-727e6154b77c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.051565] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 904.051851] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8d94d1f-5f19-4dbd-b18c-842e58abe397 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.058309] env[69994]: DEBUG oslo_vmware.api [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 904.058309] env[69994]: value = "task-2925647" [ 904.058309] env[69994]: _type = "Task" [ 904.058309] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.069030] env[69994]: DEBUG oslo_vmware.api [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925647, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.106974] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: bb062ddc-5281-4957-bb9d-8f5c0b0ba526] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 904.233604] env[69994]: DEBUG nova.compute.manager [req-7055a6e1-d25c-42d2-8503-b669b05af202 req-88d4ae8c-5530-4efc-a5bd-cb8ed956ddfe service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Received event network-vif-deleted-c8e0f8d2-cf57-4669-8fe4-3fec8b7232df {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 904.233604] env[69994]: INFO nova.compute.manager [req-7055a6e1-d25c-42d2-8503-b669b05af202 req-88d4ae8c-5530-4efc-a5bd-cb8ed956ddfe service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Neutron deleted interface c8e0f8d2-cf57-4669-8fe4-3fec8b7232df; detaching it from the instance and deleting it from the info cache [ 904.233604] env[69994]: DEBUG nova.network.neutron [req-7055a6e1-d25c-42d2-8503-b669b05af202 req-88d4ae8c-5530-4efc-a5bd-cb8ed956ddfe service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.277026] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925645, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.341973] env[69994]: DEBUG oslo_concurrency.lockutils [req-d3015ef8-c58a-4af2-94e2-d08c5c9a6f8c req-7ee418d7-f5da-445a-a654-0c9e93ab5c1d service nova] Releasing lock "refresh_cache-565066c4-2f33-44c6-8e82-4c6d729cd0b7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.484726] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925646, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131936} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.488667] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 904.490108] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa93caeb-a580-49cd-be8d-27cc8ebc24a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.529019] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 767ecd3d-631d-43b5-8ebf-28b6cb2077e9/767ecd3d-631d-43b5-8ebf-28b6cb2077e9.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 904.536377] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9bc78a5d-82e3-4e80-8c1f-a231fe72f628 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.551786] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "93087ec4-1d88-47cc-b1d2-0f1697556eae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.551786] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "93087ec4-1d88-47cc-b1d2-0f1697556eae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.559428] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Waiting for the task: (returnval){ [ 904.559428] env[69994]: value = "task-2925648" [ 904.559428] env[69994]: _type = "Task" [ 904.559428] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.574651] env[69994]: DEBUG oslo_vmware.api [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925647, 'name': PowerOffVM_Task, 'duration_secs': 0.419939} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.578584] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 904.578814] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 904.579207] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.582297] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d5edcfe0-e35a-4691-8759-e1744d202ae6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.609522] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c06e7e0-54e5-4b97-ae9a-2cc4178c561b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.616639] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 67f5ad56-9455-43fc-b940-8a67974703cc] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 904.619812] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976f3a14-9d15-48a3-ae65-1f56f8e68d04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.653550] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1db88b-e3b0-4400-bfec-939d33f2c473 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.660594] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 904.661720] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 904.661720] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 
tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Deleting the datastore file [datastore2] b80a405e-a02e-4b18-a325-753146533d1b {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 904.663427] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b4feb9b-229a-496d-8b06-64af8b74c6a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.668942] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82604d9-bdde-4b1c-9210-f0fd1e280c86 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.683447] env[69994]: DEBUG nova.network.neutron [-] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.685177] env[69994]: DEBUG nova.compute.provider_tree [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 904.690736] env[69994]: DEBUG oslo_vmware.api [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for the task: (returnval){ [ 904.690736] env[69994]: value = "task-2925650" [ 904.690736] env[69994]: _type = "Task" [ 904.690736] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.695979] env[69994]: DEBUG oslo_vmware.api [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925650, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.739266] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef4bacc3-39ce-4d51-a18c-2b2cdd2fbaaa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.748913] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08bbadf-319f-47ec-af98-49e6ab75263e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.784689] env[69994]: DEBUG nova.compute.manager [req-7055a6e1-d25c-42d2-8503-b669b05af202 req-88d4ae8c-5530-4efc-a5bd-cb8ed956ddfe service nova] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Detach interface failed, port_id=c8e0f8d2-cf57-4669-8fe4-3fec8b7232df, reason: Instance b00d09ea-5eee-47ed-adcb-288cdd362e89 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 904.789895] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925645, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.054766] env[69994]: DEBUG nova.compute.manager [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 905.070847] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925648, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.125038] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 203bc0d6-c149-4c3d-9ac7-962210d6b01d] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 905.187631] env[69994]: INFO nova.compute.manager [-] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Took 1.87 seconds to deallocate network for instance. [ 905.209053] env[69994]: DEBUG oslo_vmware.api [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925650, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.219312] env[69994]: ERROR nova.scheduler.client.report [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [req-1ff22ff6-2f6d-4b11-a38f-dab3c47b3058] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1ff22ff6-2f6d-4b11-a38f-dab3c47b3058"}]} [ 905.238449] env[69994]: DEBUG nova.scheduler.client.report [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 905.255885] env[69994]: DEBUG nova.scheduler.client.report [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 905.256700] env[69994]: DEBUG nova.compute.provider_tree [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 905.275417] env[69994]: DEBUG nova.scheduler.client.report [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 905.283613] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925645, 'name': MoveVirtualDisk_Task} progress is 71%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.295372] env[69994]: DEBUG nova.scheduler.client.report [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 905.573952] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925648, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.578372] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.628244] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: f6408fad-a6b8-4868-a192-3acd065935ea] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 905.658316] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91f286f-bcba-4dba-89c0-67aa7be77614 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.666868] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35cf9f54-8b46-4f60-b3bd-1d5b08bb11b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.701797] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319ef4d2-67cb-454d-b1a7-4d1625ec9e05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.704925] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.712546] env[69994]: DEBUG oslo_vmware.api [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925650, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.713818] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dca2cdc-b076-44df-9e33-2ffed90db817 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.727675] env[69994]: DEBUG nova.compute.provider_tree [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 905.777602] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925645, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.073605] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925648, 'name': ReconfigVM_Task, 'duration_secs': 1.364184} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.073880] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 767ecd3d-631d-43b5-8ebf-28b6cb2077e9/767ecd3d-631d-43b5-8ebf-28b6cb2077e9.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 906.074521] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-475c529c-2ee5-432e-9768-050e79abf4b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.080633] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Waiting for the task: (returnval){ [ 906.080633] env[69994]: value = "task-2925651" [ 906.080633] env[69994]: _type = "Task" [ 906.080633] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.088456] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925651, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.132160] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 956306bc-4701-4c04-8221-8ec0b9df73ca] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 906.208790] env[69994]: DEBUG oslo_vmware.api [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925650, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.257423] env[69994]: DEBUG nova.scheduler.client.report [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 90 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 906.257762] env[69994]: DEBUG nova.compute.provider_tree [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 90 to 91 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 906.257956] env[69994]: DEBUG nova.compute.provider_tree [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 906.279398] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925645, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.590839] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925651, 'name': Rename_Task, 'duration_secs': 0.143906} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.591142] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 906.591405] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b3165fb-4590-476c-b56e-ae739d7f26d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.597615] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Waiting for the task: (returnval){ [ 906.597615] env[69994]: value = "task-2925652" [ 906.597615] env[69994]: _type = "Task" [ 906.597615] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.605309] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925652, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.635647] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 3c814c83-20cc-4871-9f30-5c0c7d99b8a1] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 906.707218] env[69994]: DEBUG oslo_vmware.api [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Task: {'id': task-2925650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.611378} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.707473] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 906.707654] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 906.708167] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.708381] env[69994]: INFO nova.compute.manager [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Took 2.67 seconds to destroy the instance on the hypervisor. [ 906.708629] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 906.708861] env[69994]: DEBUG nova.compute.manager [-] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 906.708989] env[69994]: DEBUG nova.network.neutron [-] [instance: b80a405e-a02e-4b18-a325-753146533d1b] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.763089] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.678s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.765064] env[69994]: DEBUG nova.compute.manager [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 906.766284] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.985s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.767721] env[69994]: INFO nova.compute.claims [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 906.779056] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925645, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.084551} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.779341] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_e18a94e6-923f-4c4c-9bfb-452fa78d1514/OSTACK_IMG_e18a94e6-923f-4c4c-9bfb-452fa78d1514.vmdk to [datastore2] devstack-image-cache_base/136e080b-9934-48bc-87a1-4505254582a3/136e080b-9934-48bc-87a1-4505254582a3.vmdk. [ 906.779527] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Cleaning up location [datastore2] OSTACK_IMG_e18a94e6-923f-4c4c-9bfb-452fa78d1514 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 906.779693] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_e18a94e6-923f-4c4c-9bfb-452fa78d1514 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 906.779935] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71eebe82-0e75-463a-a6b2-8e3a45de41ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.786828] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 906.786828] env[69994]: value = "task-2925653" [ 906.786828] env[69994]: _type = "Task" [ 906.786828] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.794742] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925653, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.942500] env[69994]: DEBUG nova.compute.manager [req-72100de1-aafd-48a2-9234-e0701d5de57d req-8241b97a-8985-4652-9985-a6e7188067cf service nova] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Received event network-vif-deleted-aabfeee4-193c-4644-afc0-35960f4acff8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 906.942727] env[69994]: INFO nova.compute.manager [req-72100de1-aafd-48a2-9234-e0701d5de57d req-8241b97a-8985-4652-9985-a6e7188067cf service nova] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Neutron deleted interface aabfeee4-193c-4644-afc0-35960f4acff8; detaching it from the instance and deleting it from the info cache [ 906.942941] env[69994]: DEBUG nova.network.neutron [req-72100de1-aafd-48a2-9234-e0701d5de57d req-8241b97a-8985-4652-9985-a6e7188067cf service nova] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.109069] env[69994]: DEBUG oslo_vmware.api [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925652, 'name': PowerOnVM_Task, 'duration_secs': 0.451984} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.109069] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 907.109268] env[69994]: INFO nova.compute.manager [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Took 8.63 seconds to spawn the instance on the hypervisor. 
[ 907.109384] env[69994]: DEBUG nova.compute.manager [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 907.110217] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6e276c-3273-4595-9822-7cbf27902510 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.139246] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 9269e42b-b05c-4c88-9008-aaeda4b0248f] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 907.272138] env[69994]: DEBUG nova.compute.utils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 907.275610] env[69994]: DEBUG nova.compute.manager [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 907.275830] env[69994]: DEBUG nova.network.neutron [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 907.297291] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.060945} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.297544] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.297736] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "[datastore2] devstack-image-cache_base/136e080b-9934-48bc-87a1-4505254582a3/136e080b-9934-48bc-87a1-4505254582a3.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.297994] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/136e080b-9934-48bc-87a1-4505254582a3/136e080b-9934-48bc-87a1-4505254582a3.vmdk to [datastore2] 309e5014-a43f-4346-9c11-036eb36c8c1f/309e5014-a43f-4346-9c11-036eb36c8c1f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 907.298290] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-117d67ee-6808-4261-bc59-4c678a0a712b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.304981] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 907.304981] env[69994]: value = "task-2925654" [ 907.304981] env[69994]: _type = "Task" [ 907.304981] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.312728] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925654, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.314191] env[69994]: DEBUG nova.policy [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8a8bcbbe1454049982f693dbfa19790', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c545eb835008401ab8672be30dbcdad9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 907.426445] env[69994]: DEBUG nova.network.neutron [-] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.445086] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64636682-b05a-4545-90c1-2ba0fddd47eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.455154] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5724ed33-9368-4709-9692-fe622dfa9042 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.491421] env[69994]: DEBUG nova.compute.manager [req-72100de1-aafd-48a2-9234-e0701d5de57d req-8241b97a-8985-4652-9985-a6e7188067cf service nova] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Detach interface failed, port_id=aabfeee4-193c-4644-afc0-35960f4acff8, reason: Instance b80a405e-a02e-4b18-a325-753146533d1b could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 907.628554] env[69994]: INFO nova.compute.manager [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Took 41.67 seconds to build instance. [ 907.643212] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 9a1343a8-11b4-4c9e-8445-931eab036a4d] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 907.688575] env[69994]: DEBUG nova.network.neutron [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Successfully created port: 0f0159af-9c04-46fe-8fac-ebd620726fd7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 907.779328] env[69994]: DEBUG nova.compute.manager [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 907.820220] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925654, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.929299] env[69994]: INFO nova.compute.manager [-] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Took 1.22 seconds to deallocate network for instance. [ 908.130576] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4fbf34c-0e86-4297-8eec-8dbcfd24e048 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Lock "767ecd3d-631d-43b5-8ebf-28b6cb2077e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.179s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.147118] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 03a10403-0253-4df0-84b2-1e56f0c057fe] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 908.192253] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b23366-16a1-4a24-9e35-336fecb83d66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.201317] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1695642a-ad55-4757-9bcc-4c46878de2e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.234951] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44864e49-5410-42c0-a944-b5024840c550 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.243100] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d220d03-a5ae-46e2-b7fc-715141aceaa0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.257556] env[69994]: DEBUG nova.compute.provider_tree [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.316806] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925654, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.436554] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.524683] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Acquiring lock "767ecd3d-631d-43b5-8ebf-28b6cb2077e9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.524976] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Lock "767ecd3d-631d-43b5-8ebf-28b6cb2077e9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.525268] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Acquiring lock "767ecd3d-631d-43b5-8ebf-28b6cb2077e9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.525508] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Lock "767ecd3d-631d-43b5-8ebf-28b6cb2077e9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.525884] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Lock "767ecd3d-631d-43b5-8ebf-28b6cb2077e9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.529191] env[69994]: INFO nova.compute.manager [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Terminating instance [ 908.650823] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 00ab07b7-e7ed-4a71-b684-d5af8b1b7616] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 908.761081] env[69994]: DEBUG 
nova.scheduler.client.report [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 908.791870] env[69994]: DEBUG nova.compute.manager [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 908.817422] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925654, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.826649] env[69994]: DEBUG nova.virt.hardware [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 908.826899] env[69994]: DEBUG nova.virt.hardware [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 908.827092] env[69994]: DEBUG nova.virt.hardware [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 908.827245] env[69994]: DEBUG nova.virt.hardware [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 908.827386] env[69994]: DEBUG nova.virt.hardware [None 
req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 908.827546] env[69994]: DEBUG nova.virt.hardware [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 908.827792] env[69994]: DEBUG nova.virt.hardware [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 908.827960] env[69994]: DEBUG nova.virt.hardware [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 908.828156] env[69994]: DEBUG nova.virt.hardware [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 908.828321] env[69994]: DEBUG nova.virt.hardware [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 908.828487] env[69994]: DEBUG nova.virt.hardware [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 908.829619] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f65e74-e721-4319-b07f-bf47c998189f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.838032] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b242e58-8ba7-47fc-aba4-133c3dd966ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.033908] env[69994]: DEBUG nova.compute.manager [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 909.034241] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 909.035242] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a9ef73-c613-43e8-b0e8-44757947a8bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.043660] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.044230] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb132f96-995a-45fa-a936-90c7b3c99947 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.051039] env[69994]: DEBUG oslo_vmware.api [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Waiting for the task: (returnval){ [ 909.051039] env[69994]: value = "task-2925655" [ 909.051039] env[69994]: _type = "Task" [ 909.051039] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.060453] env[69994]: DEBUG oslo_vmware.api [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925655, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.156156] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 6fb97a65-bf0b-4e79-9611-f0f3179661b5] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 909.165016] env[69994]: DEBUG nova.compute.manager [req-2862036d-1312-4249-a50f-9ee27a2838ad req-ae517f53-020a-479d-8e58-be7b25385fec service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Received event network-vif-plugged-0f0159af-9c04-46fe-8fac-ebd620726fd7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 909.165273] env[69994]: DEBUG oslo_concurrency.lockutils [req-2862036d-1312-4249-a50f-9ee27a2838ad req-ae517f53-020a-479d-8e58-be7b25385fec service nova] Acquiring lock "f0b77732-aae1-4790-a2c7-75586e78eda6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.165483] env[69994]: DEBUG oslo_concurrency.lockutils [req-2862036d-1312-4249-a50f-9ee27a2838ad req-ae517f53-020a-479d-8e58-be7b25385fec service nova] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.165651] env[69994]: DEBUG oslo_concurrency.lockutils [req-2862036d-1312-4249-a50f-9ee27a2838ad req-ae517f53-020a-479d-8e58-be7b25385fec service nova] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.165851] env[69994]: DEBUG nova.compute.manager [req-2862036d-1312-4249-a50f-9ee27a2838ad req-ae517f53-020a-479d-8e58-be7b25385fec service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] No waiting events found dispatching network-vif-plugged-0f0159af-9c04-46fe-8fac-ebd620726fd7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 909.166051] env[69994]: WARNING nova.compute.manager [req-2862036d-1312-4249-a50f-9ee27a2838ad req-ae517f53-020a-479d-8e58-be7b25385fec service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Received unexpected event network-vif-plugged-0f0159af-9c04-46fe-8fac-ebd620726fd7 for instance with vm_state building and task_state spawning. [ 909.266046] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.500s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.266813] env[69994]: DEBUG nova.compute.manager [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 909.269558] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.538s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.269805] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.271966] env[69994]: DEBUG oslo_concurrency.lockutils [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.970s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.272370] env[69994]: DEBUG oslo_concurrency.lockutils [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.274567] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.842s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.276319] env[69994]: INFO nova.compute.claims [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.291306] env[69994]: DEBUG nova.network.neutron [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Successfully updated port: 0f0159af-9c04-46fe-8fac-ebd620726fd7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 909.303835] env[69994]: INFO nova.scheduler.client.report [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Deleted allocations for instance 9e9973e1-feb8-4fd7-95ae-e6d824af5a64 [ 909.310120] env[69994]: INFO nova.scheduler.client.report [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleted allocations for instance 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7 [ 909.326427] env[69994]: DEBUG oslo_vmware.api [None 
req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925654, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.561683] env[69994]: DEBUG oslo_vmware.api [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925655, 'name': PowerOffVM_Task, 'duration_secs': 0.184844} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.561980] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 909.562182] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 909.562460] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01c2466d-d80c-4b5a-92cf-5a28f15f82ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.626909] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 909.626909] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 909.626909] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Deleting the datastore file [datastore1] 767ecd3d-631d-43b5-8ebf-28b6cb2077e9 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 909.627190] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d70246f1-4d64-4a1f-92ea-b722dde52bf3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.634367] env[69994]: DEBUG oslo_vmware.api [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Waiting for the task: (returnval){ [ 909.634367] env[69994]: value = "task-2925657" [ 909.634367] env[69994]: _type = "Task" [ 909.634367] 
env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.645633] env[69994]: DEBUG oslo_vmware.api [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925657, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.659719] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 180b4236-289c-4818-885d-c66e9e9a2ea8] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 909.781585] env[69994]: DEBUG nova.compute.utils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 909.785602] env[69994]: DEBUG nova.compute.manager [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 909.785602] env[69994]: DEBUG nova.network.neutron [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 909.794281] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.794450] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.794582] env[69994]: DEBUG nova.network.neutron [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 909.824555] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925654, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.359528} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.826184] env[69994]: DEBUG nova.policy [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cbf35bd9d3ef4340bababe1ae90f65bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '522a6d0ba5a54a3f9e6cdb0cb1d18b6f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 909.827915] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64325c34-550b-4ebc-9970-f34bfe07f244 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "2358d8f6-7fbc-4f30-93ad-27f4d96aefa7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.237s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.828798] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/136e080b-9934-48bc-87a1-4505254582a3/136e080b-9934-48bc-87a1-4505254582a3.vmdk to [datastore2] 309e5014-a43f-4346-9c11-036eb36c8c1f/309e5014-a43f-4346-9c11-036eb36c8c1f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 909.829312] env[69994]: DEBUG oslo_concurrency.lockutils [None req-50942682-67e8-4f53-81f4-a92459282ca4 tempest-VolumesAssistedSnapshotsTest-1773208599 tempest-VolumesAssistedSnapshotsTest-1773208599-project-member] Lock "9e9973e1-feb8-4fd7-95ae-e6d824af5a64" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.392s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.830730] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c3ffd7-07d8-4ae8-ba17-fa2fbb527621 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.856072] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] 309e5014-a43f-4346-9c11-036eb36c8c1f/309e5014-a43f-4346-9c11-036eb36c8c1f.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 909.856624] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fae3ac1-b473-4158-96c8-65792ccce7c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.878601] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: 
(returnval){ [ 909.878601] env[69994]: value = "task-2925658" [ 909.878601] env[69994]: _type = "Task" [ 909.878601] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.888877] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925658, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.144809] env[69994]: DEBUG oslo_vmware.api [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Task: {'id': task-2925657, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161063} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.145085] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 910.145301] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 910.145498] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 910.145697] env[69994]: INFO nova.compute.manager [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 910.145981] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 910.146207] env[69994]: DEBUG nova.compute.manager [-] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 910.146310] env[69994]: DEBUG nova.network.neutron [-] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 910.155450] env[69994]: DEBUG nova.network.neutron [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Successfully created port: 7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 910.162936] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: e0764e41-0810-45a1-8917-ac901f0f8321] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 910.182840] env[69994]: DEBUG oslo_concurrency.lockutils [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "627f89ad-0381-4de9-a429-c74e26975ce9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.183100] env[69994]: DEBUG oslo_concurrency.lockutils [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "627f89ad-0381-4de9-a429-c74e26975ce9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.183348] env[69994]: DEBUG oslo_concurrency.lockutils [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "627f89ad-0381-4de9-a429-c74e26975ce9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.183538] env[69994]: DEBUG oslo_concurrency.lockutils [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "627f89ad-0381-4de9-a429-c74e26975ce9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.183718] env[69994]: DEBUG oslo_concurrency.lockutils [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "627f89ad-0381-4de9-a429-c74e26975ce9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.185872] env[69994]: 
INFO nova.compute.manager [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Terminating instance [ 910.286167] env[69994]: DEBUG nova.compute.manager [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 910.361396] env[69994]: DEBUG nova.network.neutron [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 910.391239] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925658, 'name': ReconfigVM_Task, 'duration_secs': 0.298599} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.391608] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Reconfigured VM instance instance-0000003d to attach disk [datastore2] 309e5014-a43f-4346-9c11-036eb36c8c1f/309e5014-a43f-4346-9c11-036eb36c8c1f.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 910.392219] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f284f18a-1e7e-4cb6-b667-23d2f3f8a8bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.398624] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 910.398624] env[69994]: value = "task-2925659" [ 910.398624] env[69994]: _type = "Task" [ 910.398624] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.406565] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925659, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.488465] env[69994]: DEBUG nova.compute.manager [req-1aeb45e2-122e-47a5-ba1f-d0fe12a4250f req-52220bf2-4e06-44d0-9b02-8744aa202a37 service nova] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Received event network-vif-deleted-86f18e5a-d4c8-412a-976f-3cef54cfe490 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 910.488684] env[69994]: INFO nova.compute.manager [req-1aeb45e2-122e-47a5-ba1f-d0fe12a4250f req-52220bf2-4e06-44d0-9b02-8744aa202a37 service nova] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Neutron deleted interface 86f18e5a-d4c8-412a-976f-3cef54cfe490; detaching it from the instance and deleting it from the info cache [ 910.488885] env[69994]: DEBUG nova.network.neutron [req-1aeb45e2-122e-47a5-ba1f-d0fe12a4250f req-52220bf2-4e06-44d0-9b02-8744aa202a37 service nova] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.552442] env[69994]: DEBUG nova.network.neutron [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance_info_cache with network_info: [{"id": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "address": "fa:16:3e:fd:73:3a", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f0159af-9c", "ovs_interfaceid": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.642069] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40682410-8b60-4e43-9e1b-92e75bc7a7fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.648440] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6343e93d-f740-438b-8ae7-bf0ede2b7a2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.677052] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: a589ddb9-947b-4ff4-94f6-1fab4bdb874b] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 910.679494] 
env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ad749f-bd26-48d1-8ab6-afdf2b169fde {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.686708] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb459025-eebb-4c3c-8982-30fdec09dd31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.690954] env[69994]: DEBUG nova.compute.manager [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 910.691178] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 910.691887] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd280ea2-d5d4-46ba-9667-7b8f1f8fdc33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.704517] env[69994]: DEBUG nova.compute.provider_tree [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.708685] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.708685] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-753cf18d-220b-4714-be6a-c3e52af86584 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.714276] env[69994]: DEBUG oslo_vmware.api [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 910.714276] env[69994]: value = "task-2925660" [ 910.714276] env[69994]: _type = "Task" [ 910.714276] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.723238] env[69994]: DEBUG oslo_vmware.api [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925660, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.911251] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925659, 'name': Rename_Task, 'duration_secs': 0.169627} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.911251] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 910.911251] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19a4f56e-9d48-42f6-8d68-6c4c95dc5835 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.917407] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 910.917407] env[69994]: value = "task-2925661" [ 910.917407] env[69994]: _type = "Task" [ 910.917407] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.926257] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925661, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.949676] env[69994]: DEBUG nova.network.neutron [-] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.997070] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-067f4426-71cd-42ae-b26a-57d8990850c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.006490] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381af190-ff04-4837-98b4-6fdb65aa8d8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.039767] env[69994]: DEBUG nova.compute.manager [req-1aeb45e2-122e-47a5-ba1f-d0fe12a4250f req-52220bf2-4e06-44d0-9b02-8744aa202a37 service nova] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Detach interface failed, port_id=86f18e5a-d4c8-412a-976f-3cef54cfe490, reason: Instance 767ecd3d-631d-43b5-8ebf-28b6cb2077e9 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 911.054610] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.054947] env[69994]: DEBUG nova.compute.manager [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Instance network_info: |[{"id": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "address": "fa:16:3e:fd:73:3a", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f0159af-9c", "ovs_interfaceid": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 911.055365] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:73:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f0159af-9c04-46fe-8fac-ebd620726fd7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 911.063766] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Creating folder: Project (c545eb835008401ab8672be30dbcdad9). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 911.064087] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be31c311-d45a-469b-b4c7-83629902b34b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.074602] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Created folder: Project (c545eb835008401ab8672be30dbcdad9) in parent group-v587342. [ 911.076035] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Creating folder: Instances. Parent ref: group-v587516. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 911.076035] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df1f17fa-94a2-400e-9150-c63f4c6b4cd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.085826] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Created folder: Instances in parent group-v587516. [ 911.086227] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 911.086440] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 911.086731] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b2a214e-d165-4582-bd61-3399dda8aa4c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.107657] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 911.107657] env[69994]: value = "task-2925664" [ 911.107657] env[69994]: _type = "Task" [ 911.107657] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.115845] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925664, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.182694] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: e46b8a11-650a-4e34-bc4a-e1c1b2515e76] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 911.196380] env[69994]: DEBUG nova.compute.manager [req-9d4559cf-e1fc-46a1-ac3c-f123c31debec req-65c4b11c-01e6-4e50-9d0b-02d16dab95a3 service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Received event network-changed-0f0159af-9c04-46fe-8fac-ebd620726fd7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 911.196578] env[69994]: DEBUG nova.compute.manager [req-9d4559cf-e1fc-46a1-ac3c-f123c31debec req-65c4b11c-01e6-4e50-9d0b-02d16dab95a3 service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Refreshing instance network info cache due to event network-changed-0f0159af-9c04-46fe-8fac-ebd620726fd7. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 911.196794] env[69994]: DEBUG oslo_concurrency.lockutils [req-9d4559cf-e1fc-46a1-ac3c-f123c31debec req-65c4b11c-01e6-4e50-9d0b-02d16dab95a3 service nova] Acquiring lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.196937] env[69994]: DEBUG oslo_concurrency.lockutils [req-9d4559cf-e1fc-46a1-ac3c-f123c31debec req-65c4b11c-01e6-4e50-9d0b-02d16dab95a3 service nova] Acquired lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.198448] env[69994]: DEBUG nova.network.neutron [req-9d4559cf-e1fc-46a1-ac3c-f123c31debec req-65c4b11c-01e6-4e50-9d0b-02d16dab95a3 service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Refreshing network info cache for port 0f0159af-9c04-46fe-8fac-ebd620726fd7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 911.209163] env[69994]: DEBUG nova.scheduler.client.report [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 911.223702] env[69994]: DEBUG oslo_vmware.api [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925660, 'name': PowerOffVM_Task, 'duration_secs': 0.189354} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.224492] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 911.224662] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 911.225356] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b1aeeaf-53b9-430f-be83-7be59b3cdb9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.291765] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 911.292009] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 911.292207] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleting the datastore file [datastore2] 627f89ad-0381-4de9-a429-c74e26975ce9 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 911.292468] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da80e023-e9a2-457d-a250-6385abe46840 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.297597] env[69994]: DEBUG nova.compute.manager [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 911.301268] env[69994]: DEBUG oslo_vmware.api [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 911.301268] env[69994]: value = "task-2925666" [ 911.301268] env[69994]: _type = "Task" [ 911.301268] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.309133] env[69994]: DEBUG oslo_vmware.api [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925666, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.327715] env[69994]: DEBUG nova.virt.hardware [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 911.328127] env[69994]: DEBUG nova.virt.hardware [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.328433] env[69994]: DEBUG nova.virt.hardware [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 911.328790] env[69994]: DEBUG nova.virt.hardware [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.329102] env[69994]: DEBUG nova.virt.hardware [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 911.329224] env[69994]: DEBUG nova.virt.hardware [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 911.329539] env[69994]: DEBUG nova.virt.hardware [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 911.329752] env[69994]: DEBUG nova.virt.hardware [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 911.329993] env[69994]: DEBUG nova.virt.hardware [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 911.330268] env[69994]: DEBUG nova.virt.hardware [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 911.330589] env[69994]: DEBUG nova.virt.hardware [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 911.333337] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2493d927-75b2-40e9-8f3e-4bccce02ea48 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.341036] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54eefea9-6865-4505-b986-37bbfc26d692 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.427151] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925661, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.452036] env[69994]: INFO nova.compute.manager [-] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Took 1.31 seconds to deallocate network for instance. [ 911.618018] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925664, 'name': CreateVM_Task, 'duration_secs': 0.337253} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.618254] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 911.618957] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.619142] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.619475] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 911.619724] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab81c6ae-73e2-4ede-b86a-8fce022c10b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.624724] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 911.624724] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529e2188-20f6-fcfd-bbc3-79c03acd439d" [ 911.624724] env[69994]: _type = "Task" [ 911.624724] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.632679] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529e2188-20f6-fcfd-bbc3-79c03acd439d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.671963] env[69994]: DEBUG nova.network.neutron [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Successfully updated port: 7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 911.687402] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 2ee43622-74f3-4bf6-88e3-cba4ff7ce33d] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 911.714377] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.439s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.714526] env[69994]: DEBUG nova.compute.manager [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 911.718146] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.411s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.718146] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.720025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.388s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.721582] env[69994]: INFO nova.compute.claims [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 911.765176] env[69994]: INFO nova.scheduler.client.report [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Deleted allocations for instance 9b6aca3c-337b-4067-80e0-487d956fabc7 [ 911.814735] env[69994]: DEBUG oslo_vmware.api [None 
req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925666, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190082} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.816677] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 911.817150] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 911.817442] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 911.820017] env[69994]: INFO nova.compute.manager [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Took 1.13 seconds to destroy the instance on the hypervisor. [ 911.820017] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 911.820017] env[69994]: DEBUG nova.compute.manager [-] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 911.820017] env[69994]: DEBUG nova.network.neutron [-] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 911.927173] env[69994]: DEBUG oslo_vmware.api [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925661, 'name': PowerOnVM_Task, 'duration_secs': 0.687478} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.927454] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 911.927791] env[69994]: INFO nova.compute.manager [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Took 15.88 seconds to spawn the instance on the hypervisor. [ 911.928166] env[69994]: DEBUG nova.compute.manager [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 911.929670] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba32290-7f77-4ac8-8abc-cf21cb3f7d1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.939530] env[69994]: DEBUG nova.network.neutron [req-9d4559cf-e1fc-46a1-ac3c-f123c31debec req-65c4b11c-01e6-4e50-9d0b-02d16dab95a3 service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updated VIF entry in instance network info cache for port 0f0159af-9c04-46fe-8fac-ebd620726fd7. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 911.939989] env[69994]: DEBUG nova.network.neutron [req-9d4559cf-e1fc-46a1-ac3c-f123c31debec req-65c4b11c-01e6-4e50-9d0b-02d16dab95a3 service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance_info_cache with network_info: [{"id": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "address": "fa:16:3e:fd:73:3a", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f0159af-9c", "ovs_interfaceid": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.958595] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.138609] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529e2188-20f6-fcfd-bbc3-79c03acd439d, 'name': SearchDatastore_Task, 'duration_secs': 0.031938} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.139216] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.139664] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 912.140133] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.140527] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.140904] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 912.141374] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-968a52d1-0be4-4981-a617-c296baaecde8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.158021] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 912.158021] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Folder [datastore1] devstack-image-cache_base 
created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 912.158021] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eda52aa-024c-49bf-b0f8-c429a631e588 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.165182] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 912.165182] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521f7134-2e50-4b95-c6b2-64a3b5c355ee" [ 912.165182] env[69994]: _type = "Task" [ 912.165182] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.172044] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521f7134-2e50-4b95-c6b2-64a3b5c355ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.175840] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquiring lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.176172] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquired lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.177318] env[69994]: DEBUG nova.network.neutron [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.191076] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 1693ccdf-ea72-45d5-8b34-e2b0e155e528] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 912.222182] env[69994]: DEBUG nova.compute.utils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 912.222182] env[69994]: DEBUG nova.compute.manager [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 912.222182] env[69994]: DEBUG nova.network.neutron [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 912.273348] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2cf27d5d-7742-44a6-89a4-94b7c62cbf81 tempest-FloatingIPsAssociationTestJSON-288183458 tempest-FloatingIPsAssociationTestJSON-288183458-project-member] Lock "9b6aca3c-337b-4067-80e0-487d956fabc7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.960s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.286236] env[69994]: DEBUG nova.policy [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4543702642614e079383389379629d8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0bbe936f4d284e73999846251269fefd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 912.447055] env[69994]: DEBUG oslo_concurrency.lockutils [req-9d4559cf-e1fc-46a1-ac3c-f123c31debec req-65c4b11c-01e6-4e50-9d0b-02d16dab95a3 service nova] Releasing lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.452257] env[69994]: INFO nova.compute.manager [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Took 46.52 seconds to build instance. 
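The recurring "Waiting for the task: (returnval){ value = "task-..." } to complete" and "progress is N%" entries throughout this trace come from oslo.vmware's asynchronous task handling: the driver starts a vSphere task (PowerOffVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task, ...) and then polls it until completion. A minimal sketch of that invoke-then-poll pattern follows; the vCenter endpoint, credentials and vm_ref are illustrative placeholders and not taken from this log, and this is not the actual Nova driver code.

    # Sketch only: invoke a vSphere task and poll it, the pattern behind the
    # "Invoking VirtualMachine.PowerOffVM_Task ..." and
    # "Task: {'id': task-..., 'name': PowerOffVM_Task} progress is N%" entries.
    from oslo_vmware import api as vmware_api

    # Placeholder endpoint and credentials (assumed values, not from this log).
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_off(vm_ref):
        # Start the asynchronous vSphere task on the managed object ...
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # ... then block until it finishes; wait_for_task() polls the task and
        # emits the periodic "progress is N%" DEBUG lines and the final
        # "completed successfully" entry seen above.
        return session.wait_for_task(task)
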
[ 912.582753] env[69994]: DEBUG nova.compute.manager [req-b5010ff6-1b8a-49ee-b550-f5327b1d44f8 req-6dd62c4e-a5a6-4c68-9ef1-79725415294c service nova] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Received event network-vif-deleted-f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 912.582982] env[69994]: INFO nova.compute.manager [req-b5010ff6-1b8a-49ee-b550-f5327b1d44f8 req-6dd62c4e-a5a6-4c68-9ef1-79725415294c service nova] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Neutron deleted interface f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1; detaching it from the instance and deleting it from the info cache [ 912.585114] env[69994]: DEBUG nova.network.neutron [req-b5010ff6-1b8a-49ee-b550-f5327b1d44f8 req-6dd62c4e-a5a6-4c68-9ef1-79725415294c service nova] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.629399] env[69994]: DEBUG nova.network.neutron [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Successfully created port: d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.675605] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521f7134-2e50-4b95-c6b2-64a3b5c355ee, 'name': SearchDatastore_Task, 'duration_secs': 0.039493} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.677544] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2392ae4-e222-401c-b94c-f450f72a57d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.685321] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 912.685321] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a4f111-336e-3007-15e8-d1ff56c28576" [ 912.685321] env[69994]: _type = "Task" [ 912.685321] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.695866] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: f36c29d1-b945-4afe-abbd-431e59de7cec] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 912.697907] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a4f111-336e-3007-15e8-d1ff56c28576, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.726043] env[69994]: DEBUG nova.compute.manager [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 912.741323] env[69994]: DEBUG nova.network.neutron [-] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.760386] env[69994]: DEBUG nova.network.neutron [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 912.828596] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "309e5014-a43f-4346-9c11-036eb36c8c1f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.955591] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a898ac8-5a33-43fc-a364-a59c8851546a tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "309e5014-a43f-4346-9c11-036eb36c8c1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.042s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.955902] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "309e5014-a43f-4346-9c11-036eb36c8c1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.127s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.956125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "309e5014-a43f-4346-9c11-036eb36c8c1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.956345] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "309e5014-a43f-4346-9c11-036eb36c8c1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.956529] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock 
"309e5014-a43f-4346-9c11-036eb36c8c1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.958888] env[69994]: INFO nova.compute.manager [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Terminating instance [ 913.091789] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8aa22155-1c8f-43f7-93a8-071c700f6567 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.101455] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d2cf12-11cf-46c0-9bd5-6737ea5563d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.137363] env[69994]: DEBUG nova.compute.manager [req-b5010ff6-1b8a-49ee-b550-f5327b1d44f8 req-6dd62c4e-a5a6-4c68-9ef1-79725415294c service nova] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Detach interface failed, port_id=f1a4dbcb-4a7a-4865-bc4f-b3c71ee65ae1, reason: Instance 627f89ad-0381-4de9-a429-c74e26975ce9 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 913.149013] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6460ad14-5188-4aea-9796-03ef9010e0b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.156037] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095c4931-efe3-49a2-a34b-a9471e1e5805 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.185914] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dca4ccf-11f7-498b-b75d-ac5b7c76331d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.199869] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e338f04-acee-4806-af04-614b14be7be2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.204150] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 367665db-def4-4148-a316-b83378e00ba8] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 913.206611] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a4f111-336e-3007-15e8-d1ff56c28576, 'name': SearchDatastore_Task, 'duration_secs': 0.02861} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.208128] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.208625] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] f0b77732-aae1-4790-a2c7-75586e78eda6/f0b77732-aae1-4790-a2c7-75586e78eda6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 913.209488] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb3979ac-9622-4b4e-8abb-166647adcf1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.228307] env[69994]: DEBUG nova.compute.provider_tree [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.242869] env[69994]: DEBUG nova.compute.manager [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Received event network-vif-plugged-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 913.243171] env[69994]: DEBUG oslo_concurrency.lockutils [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] Acquiring lock "43119e21-5226-482c-b640-33e73051a563-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.243528] env[69994]: DEBUG oslo_concurrency.lockutils [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] Lock "43119e21-5226-482c-b640-33e73051a563-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.243798] env[69994]: DEBUG oslo_concurrency.lockutils [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] Lock "43119e21-5226-482c-b640-33e73051a563-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.244205] env[69994]: DEBUG nova.compute.manager [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] No waiting events found dispatching 
network-vif-plugged-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 913.244497] env[69994]: WARNING nova.compute.manager [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Received unexpected event network-vif-plugged-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee for instance with vm_state building and task_state spawning. [ 913.244846] env[69994]: DEBUG nova.compute.manager [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Received event network-changed-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 913.245079] env[69994]: DEBUG nova.compute.manager [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Refreshing instance network info cache due to event network-changed-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 913.245317] env[69994]: DEBUG oslo_concurrency.lockutils [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] Acquiring lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.249017] env[69994]: INFO nova.compute.manager [-] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Took 1.43 seconds to deallocate network for instance. [ 913.249539] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 913.249539] env[69994]: value = "task-2925667" [ 913.249539] env[69994]: _type = "Task" [ 913.249539] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.264266] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925667, 'name': CopyVirtualDisk_Task} progress is 0%. 
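Note: the WARNING above ("Received unexpected event network-vif-plugged-... for instance with vm_state building") occurs when Neutron's notification reaches nova-compute before the spawn path has registered a waiter for that event, so pop_instance_event finds nothing to dispatch to. The toy model below shows only that register/dispatch idea; the class and method names are hypothetical simplifications, not nova.compute.manager.InstanceEvents itself.

```python
# Toy model of the waiting-event dispatch behind the records above; the
# class and method names are hypothetical simplifications, not
# nova.compute.manager.InstanceEvents itself.
import threading


class InstanceEventWaiters:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Spawn path registers interest before it blocks on the VIF plug."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        """Called when Neutron reports e.g. network-vif-plugged-<port-id>."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # Nothing registered yet: this is the "Received unexpected event"
            # case, and the event is simply logged and dropped.
            return False
        ev.set()
        return True
```

When the event arrives early, as here, the spawn simply continues; the subsequent network-changed event then triggers the refresh of the instance network info cache seen a few records later.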
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.323544] env[69994]: DEBUG nova.network.neutron [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Updating instance_info_cache with network_info: [{"id": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "address": "fa:16:3e:02:1d:b7", "network": {"id": "fdb14e71-8fc5-4c42-b3c1-40ef6285d670", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1372511022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "522a6d0ba5a54a3f9e6cdb0cb1d18b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e211ec9-4d", "ovs_interfaceid": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.464891] env[69994]: DEBUG nova.compute.manager [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 913.465211] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 913.466302] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f10a24-8a6e-4e63-8f0f-46942f27e2b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.474230] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 913.474514] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-369a7295-c056-47d2-a755-5ca1b006e63c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.480741] env[69994]: DEBUG oslo_vmware.api [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 913.480741] env[69994]: value = "task-2925668" [ 913.480741] env[69994]: _type = "Task" [ 913.480741] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.488449] env[69994]: DEBUG oslo_vmware.api [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925668, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.710802] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: db9f7abd-ab45-49a3-9035-695b26756142] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 913.733160] env[69994]: DEBUG nova.scheduler.client.report [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.744932] env[69994]: DEBUG nova.compute.manager [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 913.758626] env[69994]: DEBUG oslo_concurrency.lockutils [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.776151] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925667, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.786397] env[69994]: DEBUG nova.virt.hardware [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 913.790019] env[69994]: DEBUG nova.virt.hardware [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.790019] env[69994]: DEBUG nova.virt.hardware [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 913.790019] env[69994]: DEBUG nova.virt.hardware [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.790019] env[69994]: DEBUG nova.virt.hardware [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 913.790019] env[69994]: DEBUG nova.virt.hardware [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 913.790019] env[69994]: DEBUG nova.virt.hardware [None req-4041b870-9745-4e77-b04a-700f34914830 
tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 913.790019] env[69994]: DEBUG nova.virt.hardware [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 913.790019] env[69994]: DEBUG nova.virt.hardware [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 913.790019] env[69994]: DEBUG nova.virt.hardware [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 913.790019] env[69994]: DEBUG nova.virt.hardware [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 913.790491] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42df655f-dec4-4c2a-ad8f-c116857ae1ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.801765] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463c3c69-f588-4940-880b-845719fc193a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.831025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Releasing lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.831025] env[69994]: DEBUG nova.compute.manager [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Instance network_info: |[{"id": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "address": "fa:16:3e:02:1d:b7", "network": {"id": "fdb14e71-8fc5-4c42-b3c1-40ef6285d670", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1372511022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "522a6d0ba5a54a3f9e6cdb0cb1d18b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e211ec9-4d", "ovs_interfaceid": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 913.831025] env[69994]: DEBUG oslo_concurrency.lockutils [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] Acquired lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.831025] env[69994]: DEBUG nova.network.neutron [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Refreshing network info cache for port 7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 913.831025] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:1d:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49b5df12-d801-4140-8816-2fd401608c7d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 913.839773] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Creating folder: Project (522a6d0ba5a54a3f9e6cdb0cb1d18b6f). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 913.841343] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d860f0e3-522e-44da-bb3f-e423e17a454e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.853029] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Created folder: Project (522a6d0ba5a54a3f9e6cdb0cb1d18b6f) in parent group-v587342. [ 913.853029] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Creating folder: Instances. Parent ref: group-v587519. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 913.853029] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0bd8918-190f-40ea-9c0a-467edd1d9d96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.864035] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Created folder: Instances in parent group-v587519. [ 913.864035] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 913.864035] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43119e21-5226-482c-b640-33e73051a563] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 913.864035] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57c263d6-e0f1-429d-8512-f9a37980b380 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.887387] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 913.887387] env[69994]: value = "task-2925671" [ 913.887387] env[69994]: _type = "Task" [ 913.887387] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.896042] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925671, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.994658] env[69994]: DEBUG oslo_vmware.api [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925668, 'name': PowerOffVM_Task, 'duration_secs': 0.32035} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.994658] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.994658] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 913.994658] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a90ec232-9f8c-42c3-87f7-3ce2526ac0aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.071146] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 914.071544] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 914.071835] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleting the datastore file [datastore2] 309e5014-a43f-4346-9c11-036eb36c8c1f {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 914.072224] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bc49371-5dc2-4001-a27f-67caf1f84f11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.081963] env[69994]: DEBUG oslo_vmware.api [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 914.081963] env[69994]: value = "task-2925673" [ 914.081963] env[69994]: _type = "Task" [ 914.081963] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.096848] env[69994]: DEBUG oslo_vmware.api [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925673, 'name': DeleteDatastoreFile_Task} progress is 0%. 
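Note: the terminate of 309e5014-a43f-4346-9c11-036eb36c8c1f in the surrounding records follows the usual vmwareapi order: power the VM off, unregister it, then delete its files from the datastore. The outline below only captures that ordering; the three callables are placeholders, not the signatures used by nova.virt.vmwareapi.vmops.

```python
# Hedged outline of the teardown order visible in these records
# (PowerOffVM_Task -> UnregisterVM -> DeleteDatastoreFile_Task); the three
# callables are placeholders, not nova.virt.vmwareapi.vmops signatures.
def destroy_instance(power_off, unregister, delete_datastore_files, instance):
    try:
        power_off(instance)            # PowerOffVM_Task
    except Exception:
        # An already powered-off VM should not block the rest of the teardown.
        pass
    unregister(instance)               # UnregisterVM
    delete_datastore_files(instance)   # DeleteDatastoreFile_Task, e.g. on datastore2
```

After the hypervisor-side destroy, the compute manager deallocates the instance's networks, which is the "Deallocating network for instance" record that follows.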
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.218370] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: e87e1839-9fef-462d-b1ab-842ef76828a4] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 914.243007] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.243585] env[69994]: DEBUG nova.compute.manager [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 914.249495] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.307s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.249495] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.250915] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.188s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.251140] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.253254] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.675s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.255095] env[69994]: INFO nova.compute.claims [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 
93087ec4-1d88-47cc-b1d2-0f1697556eae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 914.280420] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925667, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.628136} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.280940] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] f0b77732-aae1-4790-a2c7-75586e78eda6/f0b77732-aae1-4790-a2c7-75586e78eda6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 914.281406] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 914.281802] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8bef1866-9064-4186-99aa-660624a392ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.298099] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 914.298099] env[69994]: value = "task-2925674" [ 914.298099] env[69994]: _type = "Task" [ 914.298099] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.306981] env[69994]: INFO nova.scheduler.client.report [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Deleted allocations for instance 9d146d57-9948-4b18-a3f3-675b53d137ed [ 914.310028] env[69994]: INFO nova.scheduler.client.report [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Deleted allocations for instance f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8 [ 914.324882] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925674, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.398010] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925671, 'name': CreateVM_Task, 'duration_secs': 0.469809} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.401016] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43119e21-5226-482c-b640-33e73051a563] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 914.401016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.401016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.401016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 914.401016] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74806d2b-b3d4-494d-bf76-ebd1754eed7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.406591] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 914.406591] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529584b0-d4f0-6a75-8cd4-463a3037d13d" [ 914.406591] env[69994]: _type = "Task" [ 914.406591] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.417350] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529584b0-d4f0-6a75-8cd4-463a3037d13d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.555368] env[69994]: DEBUG nova.network.neutron [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Successfully updated port: d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 914.591495] env[69994]: DEBUG oslo_vmware.api [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.290998} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.591758] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 914.591944] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 914.592141] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 914.592324] env[69994]: INFO nova.compute.manager [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 914.592566] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 914.592752] env[69994]: DEBUG nova.compute.manager [-] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 914.592849] env[69994]: DEBUG nova.network.neutron [-] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 914.711514] env[69994]: DEBUG nova.network.neutron [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Updated VIF entry in instance network info cache for port 7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 914.711514] env[69994]: DEBUG nova.network.neutron [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Updating instance_info_cache with network_info: [{"id": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "address": "fa:16:3e:02:1d:b7", "network": {"id": "fdb14e71-8fc5-4c42-b3c1-40ef6285d670", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1372511022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "522a6d0ba5a54a3f9e6cdb0cb1d18b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e211ec9-4d", "ovs_interfaceid": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.721701] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: c512ee01-7d45-49f0-b2ce-659392527264] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 914.760855] env[69994]: DEBUG nova.compute.utils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 914.764139] env[69994]: DEBUG nova.compute.manager [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 914.764387] env[69994]: DEBUG nova.network.neutron [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 914.812078] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925674, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076071} completed successfully. 
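Note: the f0b77732-aae1-4790-a2c7-75586e78eda6 spawn in these records uses the image-cache pattern: copy the cached VMDK from devstack-image-cache_base into the instance directory, extend the root disk to the flavor size (1048576 KiB, i.e. 1 GiB, for m1.nano), then reconfigure the VM to attach it. The sketch below is schematic; the path layout mirrors the log, while the helper callables are assumptions.

```python
# Schematic of the root-disk pipeline seen in the log:
# CopyVirtualDisk_Task -> ExtendVirtualDisk_Task -> ReconfigVM_Task.
# copy_disk, extend_disk and attach_disk are placeholder callables,
# not the real nova.virt.vmwareapi helpers.
def prepare_root_disk(copy_disk, extend_disk, attach_disk,
                      datastore, image_id, instance_uuid, root_gb):
    cache_vmdk = '[%s] devstack-image-cache_base/%s/%s.vmdk' % (
        datastore, image_id, image_id)
    instance_vmdk = '[%s] %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)

    copy_disk(cache_vmdk, instance_vmdk)               # CopyVirtualDisk_Task
    extend_disk(instance_vmdk, root_gb * 1024 * 1024)  # size in KiB, e.g. 1048576
    attach_disk(instance_vmdk, disk_type='sparse')     # ReconfigVM_Task
    return instance_vmdk
```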
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.814701] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 914.819021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a9217f-0d06-4c9b-89d5-7a9e6bc6a3dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.842586] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8c80118-bd63-4e9e-8b3f-653b1530825a tempest-ImagesOneServerTestJSON-488467358 tempest-ImagesOneServerTestJSON-488467358-project-member] Lock "9d146d57-9948-4b18-a3f3-675b53d137ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.262s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.856460] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] f0b77732-aae1-4790-a2c7-75586e78eda6/f0b77732-aae1-4790-a2c7-75586e78eda6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 914.857882] env[69994]: DEBUG nova.policy [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6bcafd04d09f45fab9d573d11d01dfbf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c324e22a0046460b9ad3ad8578f7ef6f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 914.859734] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9333eb25-7fab-4390-9bfe-79098b9a8ffa tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.282s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.862341] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8b5096f-21d3-4c80-a5a9-bede4c150e17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.886615] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 914.886615] env[69994]: value = "task-2925675" [ 914.886615] env[69994]: _type = "Task" [ 914.886615] env[69994]: } 
to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.896448] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925675, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.920640] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529584b0-d4f0-6a75-8cd4-463a3037d13d, 'name': SearchDatastore_Task, 'duration_secs': 0.014119} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.920967] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.921232] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 914.921473] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.921617] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.921796] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 914.923990] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce6f5f7b-fbf8-49f7-a4ce-3f277677ff88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.933881] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 914.934103] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 914.935375] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a121013-f107-4d5f-83f9-86e2f6b7c0d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.940252] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 914.940252] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5275223c-2a4f-f190-958b-48a4b7c1fd71" [ 914.940252] env[69994]: _type = "Task" [ 914.940252] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.947906] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5275223c-2a4f-f190-958b-48a4b7c1fd71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.057075] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "refresh_cache-d28a6129-1bfe-40da-bc91-c68cf874aa36" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.057334] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "refresh_cache-d28a6129-1bfe-40da-bc91-c68cf874aa36" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.058039] env[69994]: DEBUG nova.network.neutron [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 915.213857] env[69994]: DEBUG oslo_concurrency.lockutils [req-5e2e463e-5fc5-4804-8f3c-6d7ce7e066f7 req-baef161d-5c2f-41fd-a81b-906e12639a13 service nova] Releasing lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.225973] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: e6cb7f94-9629-4eec-bd2c-5bc2b4583bf4] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 915.274285] env[69994]: DEBUG nova.compute.manager [None 
req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 915.287052] env[69994]: DEBUG nova.compute.manager [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Received event network-vif-plugged-d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 915.287052] env[69994]: DEBUG oslo_concurrency.lockutils [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] Acquiring lock "d28a6129-1bfe-40da-bc91-c68cf874aa36-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.287052] env[69994]: DEBUG oslo_concurrency.lockutils [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] Lock "d28a6129-1bfe-40da-bc91-c68cf874aa36-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.287052] env[69994]: DEBUG oslo_concurrency.lockutils [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] Lock "d28a6129-1bfe-40da-bc91-c68cf874aa36-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.287052] env[69994]: DEBUG nova.compute.manager [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] No waiting events found dispatching network-vif-plugged-d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 915.287052] env[69994]: WARNING nova.compute.manager [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Received unexpected event network-vif-plugged-d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6 for instance with vm_state building and task_state spawning. [ 915.287052] env[69994]: DEBUG nova.compute.manager [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Received event network-changed-d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 915.287052] env[69994]: DEBUG nova.compute.manager [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Refreshing instance network info cache due to event network-changed-d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6. 
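Note: the Acquiring/acquired/released lines with waited and held timings throughout this section are emitted by oslo.concurrency's lock helpers. The usage sketch below shows the two forms that produce those messages; the lock names and functions are examples rather than Nova code.

```python
# Usage sketch of oslo.concurrency locking, which emits the
# Acquiring/acquired/released DEBUG lines (with waited/held times) seen above.
# The lock names and functions here are examples, not Nova code.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage(instance):
    # Runs with the named lock held; concurrent callers block, and the time
    # they spent blocked is reported as "waited N.NNNs" when they acquire it.
    pass


def refresh_cache(instance_uuid):
    # The context-manager form backs lines like
    # 'Acquiring lock "refresh_cache-<uuid>"'.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass
```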
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 915.287052] env[69994]: DEBUG oslo_concurrency.lockutils [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] Acquiring lock "refresh_cache-d28a6129-1bfe-40da-bc91-c68cf874aa36" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.385714] env[69994]: DEBUG nova.network.neutron [-] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.398466] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925675, 'name': ReconfigVM_Task, 'duration_secs': 0.297738} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.398770] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Reconfigured VM instance instance-0000003f to attach disk [datastore1] f0b77732-aae1-4790-a2c7-75586e78eda6/f0b77732-aae1-4790-a2c7-75586e78eda6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 915.399433] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bfbec96b-3ef6-4ad0-adf1-106358f76b51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.405562] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 915.405562] env[69994]: value = "task-2925676" [ 915.405562] env[69994]: _type = "Task" [ 915.405562] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.421310] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925676, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.452444] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5275223c-2a4f-f190-958b-48a4b7c1fd71, 'name': SearchDatastore_Task, 'duration_secs': 0.010488} completed successfully. 
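The SearchDatastore_Task and Rename_Task entries above follow oslo.vmware's wait-for-task pattern: the SOAP call returns a Task managed-object reference and the session polls it (the "progress is N%" lines) until it reaches a terminal state. A minimal sketch of driving that pattern directly through oslo.vmware, with placeholder connection details, a placeholder object reference, and no error handling:

```python
# Minimal sketch of the oslo.vmware task-polling pattern seen in the
# "Waiting for the task" / "progress is N%" entries above. Host, credentials
# and the managed-object reference are placeholders, not values from this log.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=3, task_poll_interval=0.5)

vm_ref = ...  # a VirtualMachine managed-object reference obtained elsewhere
task = session.invoke_api(session.vim, 'Rename_Task', vm_ref, newName='new-name')
# wait_for_task() polls the task and raises if it ends in an error state;
# each poll corresponds to one "_poll_task ... progress is N%" log line.
session.wait_for_task(task)
```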
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.453592] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7778722a-963c-4dc7-9381-7caa9cf1887b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.458997] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 915.458997] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528e38f8-385b-f90d-4288-4a98a1e410a1" [ 915.458997] env[69994]: _type = "Task" [ 915.458997] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.470497] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528e38f8-385b-f90d-4288-4a98a1e410a1, 'name': SearchDatastore_Task, 'duration_secs': 0.009229} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.471500] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.471500] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 43119e21-5226-482c-b640-33e73051a563/43119e21-5226-482c-b640-33e73051a563.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 915.471500] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb6c455a-230c-463a-985e-a450a2f130c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.479049] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 915.479049] env[69994]: value = "task-2925677" [ 915.479049] env[69994]: _type = "Task" [ 915.479049] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.483223] env[69994]: DEBUG nova.network.neutron [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Successfully created port: 68208872-218b-45a2-b062-bedcf2b0803e {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 915.497166] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925677, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.599418] env[69994]: DEBUG nova.network.neutron [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.718361] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a0f768-8680-4c5a-a73e-c6c0f45986af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.727465] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69c60af-1ee5-4751-9135-565f9d29ac53 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.731512] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 55dd32b0-e67f-4943-86e8-b9956267fedc] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 915.767282] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5c816c-d58c-4220-b1d1-642c8c10c806 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.777820] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870132f9-61f6-4fee-9fcf-c4ef69ad3274 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.799591] env[69994]: DEBUG nova.compute.provider_tree [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.833227] env[69994]: DEBUG nova.network.neutron [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Updating instance_info_cache with network_info: [{"id": "d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6", "address": "fa:16:3e:b7:1f:1f", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6a9f86b-b4", "ovs_interfaceid": "d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.888260] env[69994]: INFO nova.compute.manager [-] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Took 1.30 seconds to deallocate network for instance. [ 915.916582] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925676, 'name': Rename_Task, 'duration_secs': 0.134667} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.917065] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 915.917269] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7cfdaa4-6936-477b-a14f-8cc723f6e0a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.924624] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 915.924624] env[69994]: value = "task-2925678" [ 915.924624] env[69994]: _type = "Task" [ 915.924624] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.935008] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925678, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.986772] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925677, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468064} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.986998] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 43119e21-5226-482c-b640-33e73051a563/43119e21-5226-482c-b640-33e73051a563.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 915.987224] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 915.987475] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aaaafe4c-bb72-4524-9c06-aedf82b73d70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.994075] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 915.994075] env[69994]: value = "task-2925679" [ 915.994075] env[69994]: _type = "Task" [ 915.994075] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.001634] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925679, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.234950] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 70e5674d-4627-4720-9b87-955c2749e010] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 916.289783] env[69994]: DEBUG nova.compute.manager [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 916.303175] env[69994]: DEBUG nova.scheduler.client.report [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 916.320928] env[69994]: DEBUG nova.virt.hardware [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 916.321177] env[69994]: DEBUG nova.virt.hardware [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 916.321357] env[69994]: DEBUG nova.virt.hardware [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 916.321565] env[69994]: DEBUG nova.virt.hardware [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 916.321706] env[69994]: DEBUG nova.virt.hardware [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 916.321846] env[69994]: DEBUG nova.virt.hardware [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 916.325326] env[69994]: 
DEBUG nova.virt.hardware [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 916.325582] env[69994]: DEBUG nova.virt.hardware [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 916.325914] env[69994]: DEBUG nova.virt.hardware [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 916.325994] env[69994]: DEBUG nova.virt.hardware [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 916.326238] env[69994]: DEBUG nova.virt.hardware [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 916.329825] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba2c90a-0c39-45a2-b699-df048c73723b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.335198] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "refresh_cache-d28a6129-1bfe-40da-bc91-c68cf874aa36" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.336024] env[69994]: DEBUG nova.compute.manager [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Instance network_info: |[{"id": "d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6", "address": "fa:16:3e:b7:1f:1f", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tapd6a9f86b-b4", "ovs_interfaceid": "d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 916.337899] env[69994]: DEBUG oslo_concurrency.lockutils [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] Acquired lock "refresh_cache-d28a6129-1bfe-40da-bc91-c68cf874aa36" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.338283] env[69994]: DEBUG nova.network.neutron [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Refreshing network info cache for port d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.340046] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:1f:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 916.349266] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.349588] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.352169] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19fd4cf4-3f36-4cf8-8b8c-e1376239aedf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.355543] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7094472-04e7-4d3c-956f-c3556fa436f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.394020] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.394020] env[69994]: value = "task-2925680" [ 916.394020] env[69994]: _type = "Task" [ 916.394020] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.396674] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.403615] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925680, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.436177] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925678, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.504029] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925679, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066617} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.505015] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 916.506091] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68492a3b-83c8-4f9b-8b12-ec529c707e2a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.530369] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 43119e21-5226-482c-b640-33e73051a563/43119e21-5226-482c-b640-33e73051a563.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.531106] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0eddbeef-5985-4d3b-84dd-08b76d0f5664 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.554130] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 916.554130] env[69994]: value = "task-2925681" [ 916.554130] env[69994]: _type = "Task" [ 916.554130] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.562975] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925681, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.738936] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 6aacfc4e-32b4-40d7-8240-e4449cf78925] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 916.810330] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.555s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.810924] env[69994]: DEBUG nova.compute.manager [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 916.813553] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.109s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.814107] env[69994]: DEBUG nova.objects.instance [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lazy-loading 'resources' on Instance uuid b00d09ea-5eee-47ed-adcb-288cdd362e89 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.877692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.877987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.878249] env[69994]: DEBUG oslo_concurrency.lockutils [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.878249] env[69994]: DEBUG oslo_concurrency.lockutils [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.878604] env[69994]: DEBUG oslo_concurrency.lockutils [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.887894] env[69994]: INFO nova.compute.manager [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Terminating instance [ 916.901584] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925680, 'name': CreateVM_Task, 'duration_secs': 0.484641} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.902437] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 916.903159] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.903636] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.904018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 916.904568] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a12566bf-bbe9-4b5e-b4bf-749d6e333dfa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.909675] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 916.909675] env[69994]: value 
= "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f64748-61e8-e21b-3aad-040b086541d7" [ 916.909675] env[69994]: _type = "Task" [ 916.909675] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.920250] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f64748-61e8-e21b-3aad-040b086541d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.935423] env[69994]: DEBUG oslo_vmware.api [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925678, 'name': PowerOnVM_Task, 'duration_secs': 0.685045} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.935693] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 916.935962] env[69994]: INFO nova.compute.manager [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Took 8.14 seconds to spawn the instance on the hypervisor. [ 916.936323] env[69994]: DEBUG nova.compute.manager [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 916.937209] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78269861-fbd6-4997-bf98-e9699eb1fff0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.068551] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925681, 'name': ReconfigVM_Task, 'duration_secs': 0.32224} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.068852] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 43119e21-5226-482c-b640-33e73051a563/43119e21-5226-482c-b640-33e73051a563.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.069570] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f37bf876-3ecb-4eed-a7a1-7ba8bee2acf4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.076386] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 917.076386] env[69994]: value = "task-2925682" [ 917.076386] env[69994]: _type = "Task" [ 917.076386] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.084349] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925682, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.242613] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 9717f586-cedc-4f21-9ea6-7bf6e2991327] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 917.258556] env[69994]: DEBUG nova.network.neutron [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Updated VIF entry in instance network info cache for port d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 917.259582] env[69994]: DEBUG nova.network.neutron [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Updating instance_info_cache with network_info: [{"id": "d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6", "address": "fa:16:3e:b7:1f:1f", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6a9f86b-b4", "ovs_interfaceid": "d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.317515] env[69994]: DEBUG nova.compute.utils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 917.322335] env[69994]: DEBUG nova.compute.manager [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 917.322515] env[69994]: DEBUG nova.network.neutron [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 917.397278] env[69994]: DEBUG nova.compute.manager [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 917.397381] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 917.398906] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b27abbd-89f8-4938-8abb-6f037c15f210 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.407320] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.407613] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5424f6b2-dd8f-40b6-a945-7fa6e7514b26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.416933] env[69994]: DEBUG oslo_vmware.api [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 917.416933] env[69994]: value = "task-2925683" [ 917.416933] env[69994]: _type = "Task" [ 917.416933] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.418510] env[69994]: DEBUG nova.policy [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '384fc017e6c243c9b5f7f396aa8028ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '891cfe67dd0044f3920402752215e361', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 917.432885] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f64748-61e8-e21b-3aad-040b086541d7, 'name': SearchDatastore_Task, 'duration_secs': 0.009323} completed successfully. 
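The nova.policy entry above shows the "network:attach_external_network" check being evaluated against the request credentials and failing for a member/reader token. Below is a hedged sketch of how such a check looks with oslo.policy; the registered rule default ("role:admin") and the empty target are illustrative, not Nova's actual defaults:

```python
# Hedged sketch of an oslo.policy check similar to the one logged above.
# The rule default and the target dict are illustrative assumptions.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],
         'project_id': '891cfe67dd0044f3920402752215e361'}
# Returns False for a member/reader token, matching the
# "Policy check ... failed with credentials" line above.
print(enforcer.enforce('network:attach_external_network', {}, creds))
```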
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.434419] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.434419] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 917.434542] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.434712] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.435834] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.439270] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e42c99cf-8748-48f2-ae03-6a534c8ba360 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.442189] env[69994]: DEBUG oslo_vmware.api [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925683, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.459134] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.459340] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 917.460934] env[69994]: INFO nova.compute.manager [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Took 44.28 seconds to build instance. [ 917.462086] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84dd3d06-fc7f-4554-81ca-a9e21c36ccad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.476481] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 917.476481] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522f4f16-0eee-d25a-58b1-4106e58b2117" [ 917.476481] env[69994]: _type = "Task" [ 917.476481] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.487635] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522f4f16-0eee-d25a-58b1-4106e58b2117, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.593065] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925682, 'name': Rename_Task, 'duration_secs': 0.148454} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.593065] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 917.593065] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de3972aa-9b35-4c82-a1cb-0b5aa5588a36 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.599138] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 917.599138] env[69994]: value = "task-2925684" [ 917.599138] env[69994]: _type = "Task" [ 917.599138] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.609340] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925684, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.675846] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff647f66-959f-4c5c-8ae2-e7beaab30373 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.682417] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92cb6d92-8f17-4f6f-aa32-280e207afc90 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.715235] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5be68a3-5604-4bb7-a9ba-c3e5edb89a22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.723040] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed674a8-baae-423b-921b-d4662ddd7ea3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.737391] env[69994]: DEBUG nova.compute.provider_tree [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.747552] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 8dcd6786-de5e-4fa0-a6c1-66ff9cf84fd6] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 917.763357] env[69994]: DEBUG oslo_concurrency.lockutils [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] Releasing lock "refresh_cache-d28a6129-1bfe-40da-bc91-c68cf874aa36" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.763501] env[69994]: DEBUG nova.compute.manager [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Received event network-vif-deleted-02c12be4-6c2c-415c-bbf4-af57f2c1bb6f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 917.763711] env[69994]: INFO nova.compute.manager [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Neutron deleted interface 02c12be4-6c2c-415c-bbf4-af57f2c1bb6f; detaching it from the instance and deleting it from the info cache [ 917.763814] env[69994]: DEBUG nova.network.neutron [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.829059] env[69994]: DEBUG nova.compute.manager [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Start building block device mappings for instance. 
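The placement reports in this span ("Inventory has not changed for provider ... based on inventory data") list, per resource class, a total, a reservation and an allocation ratio; the capacity placement can actually allocate is (total - reserved) * allocation_ratio. A quick arithmetic check against the figures reported earlier for this provider:

```python
# Effective capacity per resource class as placement computes it:
# (total - reserved) * allocation_ratio. Figures copied from the inventory
# reported earlier for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```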
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 917.932903] env[69994]: DEBUG oslo_vmware.api [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925683, 'name': PowerOffVM_Task, 'duration_secs': 0.179834} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.933302] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 917.933646] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 917.933961] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-073af531-e8dd-4040-8e2b-7cb6bfe58108 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.970090] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2042ee07-1f69-4409-8ccc-37a2bef972b3 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.798s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.988502] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522f4f16-0eee-d25a-58b1-4106e58b2117, 'name': SearchDatastore_Task, 'duration_secs': 0.01552} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.990381] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ed1adad-3a9b-4407-bc47-62c9f004e2b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.992718] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 917.992916] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 917.993104] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Deleting the datastore file [datastore1] 63d6a59a-d58c-4179-ad39-eb9863e6f84c {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 917.993335] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4bd2924c-ede4-4294-b8a0-1331e756dadd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.998658] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 917.998658] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5265ffb8-31ba-580a-70ee-0be5cfe18eaa" [ 917.998658] env[69994]: _type = "Task" [ 917.998658] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.003863] env[69994]: DEBUG oslo_vmware.api [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for the task: (returnval){ [ 918.003863] env[69994]: value = "task-2925686" [ 918.003863] env[69994]: _type = "Task" [ 918.003863] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.009754] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5265ffb8-31ba-580a-70ee-0be5cfe18eaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.015479] env[69994]: DEBUG oslo_vmware.api [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925686, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.068821] env[69994]: DEBUG nova.compute.manager [req-892f08bb-8d94-432a-8088-596e23cbd781 req-3ce716af-dc7a-400c-a1bd-d3fc7b102c39 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Received event network-vif-plugged-68208872-218b-45a2-b062-bedcf2b0803e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 918.069176] env[69994]: DEBUG oslo_concurrency.lockutils [req-892f08bb-8d94-432a-8088-596e23cbd781 req-3ce716af-dc7a-400c-a1bd-d3fc7b102c39 service nova] Acquiring lock "8001cb13-6a52-451b-b4b6-57b893975079-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.069176] env[69994]: DEBUG oslo_concurrency.lockutils [req-892f08bb-8d94-432a-8088-596e23cbd781 req-3ce716af-dc7a-400c-a1bd-d3fc7b102c39 service nova] Lock "8001cb13-6a52-451b-b4b6-57b893975079-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.070355] env[69994]: DEBUG oslo_concurrency.lockutils [req-892f08bb-8d94-432a-8088-596e23cbd781 req-3ce716af-dc7a-400c-a1bd-d3fc7b102c39 service nova] Lock "8001cb13-6a52-451b-b4b6-57b893975079-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.070355] env[69994]: DEBUG nova.compute.manager [req-892f08bb-8d94-432a-8088-596e23cbd781 req-3ce716af-dc7a-400c-a1bd-d3fc7b102c39 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] No waiting events found dispatching network-vif-plugged-68208872-218b-45a2-b062-bedcf2b0803e {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 918.070355] env[69994]: WARNING nova.compute.manager [req-892f08bb-8d94-432a-8088-596e23cbd781 req-3ce716af-dc7a-400c-a1bd-d3fc7b102c39 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Received unexpected event network-vif-plugged-68208872-218b-45a2-b062-bedcf2b0803e for instance with vm_state building and task_state spawning. [ 918.109554] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925684, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.194540] env[69994]: DEBUG nova.network.neutron [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Successfully updated port: 68208872-218b-45a2-b062-bedcf2b0803e {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 918.241292] env[69994]: DEBUG nova.scheduler.client.report [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 918.252446] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 803e9885-000f-4696-9fb9-03361ef46538] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 918.268668] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d8e7378-c028-4811-a674-546079f34f6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.276020] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42875e75-d3fa-490f-82ab-fff59b7c9653 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.315696] env[69994]: DEBUG nova.compute.manager [req-a7870285-a61d-498f-bac9-8fd408b0012c req-caa67bec-ee87-4153-9d4f-cf202f75cc6f service nova] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Detach interface failed, port_id=02c12be4-6c2c-415c-bbf4-af57f2c1bb6f, reason: Instance 309e5014-a43f-4346-9c11-036eb36c8c1f could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 918.369176] env[69994]: DEBUG nova.network.neutron [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Successfully created port: 695f64c6-49fa-4348-bb2b-910d7a8546e6 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 918.517071] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5265ffb8-31ba-580a-70ee-0be5cfe18eaa, 'name': SearchDatastore_Task, 'duration_secs': 0.009345} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.517071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.517071] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] d28a6129-1bfe-40da-bc91-c68cf874aa36/d28a6129-1bfe-40da-bc91-c68cf874aa36.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 918.517071] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95ec5e17-adae-43a1-895c-1c2e51fd21b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.522322] env[69994]: DEBUG oslo_vmware.api [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Task: {'id': task-2925686, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208297} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.523279] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.523599] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 918.523879] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 918.524184] env[69994]: INFO nova.compute.manager [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 918.524520] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 918.524799] env[69994]: DEBUG nova.compute.manager [-] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 918.524977] env[69994]: DEBUG nova.network.neutron [-] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 918.528349] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 918.528349] env[69994]: value = "task-2925687" [ 918.528349] env[69994]: _type = "Task" [ 918.528349] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.536791] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925687, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.610481] env[69994]: DEBUG oslo_vmware.api [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925684, 'name': PowerOnVM_Task, 'duration_secs': 0.599959} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.614018] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 918.614018] env[69994]: INFO nova.compute.manager [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Took 7.31 seconds to spawn the instance on the hypervisor. 
[ 918.614018] env[69994]: DEBUG nova.compute.manager [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 918.614018] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dce1336-4429-4702-8477-4e17c86c3837 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.699107] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "refresh_cache-8001cb13-6a52-451b-b4b6-57b893975079" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.699107] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "refresh_cache-8001cb13-6a52-451b-b4b6-57b893975079" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 918.699107] env[69994]: DEBUG nova.network.neutron [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 918.749398] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.934s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.750444] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.313s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.750547] env[69994]: DEBUG nova.objects.instance [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lazy-loading 'resources' on Instance uuid b80a405e-a02e-4b18-a325-753146533d1b {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.758018] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: f3ae584d-18a5-4bbe-b4bf-860e2332b324] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 918.780577] env[69994]: INFO nova.scheduler.client.report [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Deleted allocations for instance 
b00d09ea-5eee-47ed-adcb-288cdd362e89 [ 918.837676] env[69994]: DEBUG nova.compute.manager [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 918.879910] env[69994]: DEBUG nova.virt.hardware [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 918.879910] env[69994]: DEBUG nova.virt.hardware [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 918.880470] env[69994]: DEBUG nova.virt.hardware [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 918.880470] env[69994]: DEBUG nova.virt.hardware [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 918.880996] env[69994]: DEBUG nova.virt.hardware [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 918.880996] env[69994]: DEBUG nova.virt.hardware [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 918.881515] env[69994]: DEBUG nova.virt.hardware [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 918.881751] env[69994]: DEBUG 
nova.virt.hardware [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 918.881975] env[69994]: DEBUG nova.virt.hardware [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 918.882758] env[69994]: DEBUG nova.virt.hardware [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 918.882758] env[69994]: DEBUG nova.virt.hardware [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 918.883990] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47500d2b-8d38-40e1-997f-7740c0103595 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.899883] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462715e6-25b7-4a16-944a-b63336d483e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.040620] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925687, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.133633] env[69994]: INFO nova.compute.manager [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Took 41.36 seconds to build instance. [ 919.260783] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 75e952e7-6761-49a4-9193-175f5d30494e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 919.301863] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e323d269-f475-491d-9699-9528fd4b99aa tempest-VolumesAdminNegativeTest-1005914206 tempest-VolumesAdminNegativeTest-1005914206-project-member] Lock "b00d09ea-5eee-47ed-adcb-288cdd362e89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.604s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.423098] env[69994]: DEBUG nova.network.neutron [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 919.545423] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925687, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.682111} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.545953] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] d28a6129-1bfe-40da-bc91-c68cf874aa36/d28a6129-1bfe-40da-bc91-c68cf874aa36.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 919.546238] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 919.546537] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-98f06f40-c88a-4040-b814-80f66c2b2b65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.559191] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 919.559191] env[69994]: value = "task-2925688" [ 919.559191] env[69994]: _type = "Task" [ 919.559191] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.573079] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925688, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.635546] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94440f4e-b6b1-4b98-a57d-4540c9ee9082 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Lock "43119e21-5226-482c-b640-33e73051a563" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.882s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.672875] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3115a571-7065-42dd-9607-52ea1a17517b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.681863] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb81d09-fd69-4025-ba2f-ff2f56b5a092 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.717255] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e233f1-85d5-4889-b7cc-d6fc1fe8e772 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.727896] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370a3630-b78a-4ae4-85e0-3bcaf22e3b1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.749447] env[69994]: DEBUG nova.compute.provider_tree [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.766292] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 443382a8-64af-4f13-b7ab-11234fb13fcf] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 919.866678] env[69994]: DEBUG nova.network.neutron [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Updating instance_info_cache with network_info: [{"id": "68208872-218b-45a2-b062-bedcf2b0803e", "address": "fa:16:3e:1f:9d:a7", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", 
"segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68208872-21", "ovs_interfaceid": "68208872-218b-45a2-b062-bedcf2b0803e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.069538] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925688, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063763} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.069813] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 920.070657] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f609e730-84c2-41f6-9802-a9f0642e080f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.095074] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] d28a6129-1bfe-40da-bc91-c68cf874aa36/d28a6129-1bfe-40da-bc91-c68cf874aa36.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.095398] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80f57592-257f-4ea7-92e8-2701c6491f3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.110798] env[69994]: DEBUG nova.network.neutron [-] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.117436] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 920.117436] env[69994]: value = "task-2925689" [ 920.117436] env[69994]: _type = "Task" [ 920.117436] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.128879] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925689, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.210982] env[69994]: DEBUG nova.compute.manager [req-5dde7916-7c5a-4506-a40b-d2e16d0e3952 req-9a5a2150-7e76-46a1-b68a-a57bbf4c1973 service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Received event network-vif-plugged-695f64c6-49fa-4348-bb2b-910d7a8546e6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 920.210982] env[69994]: DEBUG oslo_concurrency.lockutils [req-5dde7916-7c5a-4506-a40b-d2e16d0e3952 req-9a5a2150-7e76-46a1-b68a-a57bbf4c1973 service nova] Acquiring lock "93087ec4-1d88-47cc-b1d2-0f1697556eae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.210982] env[69994]: DEBUG oslo_concurrency.lockutils [req-5dde7916-7c5a-4506-a40b-d2e16d0e3952 req-9a5a2150-7e76-46a1-b68a-a57bbf4c1973 service nova] Lock "93087ec4-1d88-47cc-b1d2-0f1697556eae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.211634] env[69994]: DEBUG oslo_concurrency.lockutils [req-5dde7916-7c5a-4506-a40b-d2e16d0e3952 req-9a5a2150-7e76-46a1-b68a-a57bbf4c1973 service nova] Lock "93087ec4-1d88-47cc-b1d2-0f1697556eae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.211634] env[69994]: DEBUG nova.compute.manager [req-5dde7916-7c5a-4506-a40b-d2e16d0e3952 req-9a5a2150-7e76-46a1-b68a-a57bbf4c1973 service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] No waiting events found dispatching network-vif-plugged-695f64c6-49fa-4348-bb2b-910d7a8546e6 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 920.211634] env[69994]: WARNING nova.compute.manager [req-5dde7916-7c5a-4506-a40b-d2e16d0e3952 req-9a5a2150-7e76-46a1-b68a-a57bbf4c1973 service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Received unexpected event network-vif-plugged-695f64c6-49fa-4348-bb2b-910d7a8546e6 for instance with vm_state building and task_state spawning. [ 920.220310] env[69994]: DEBUG nova.network.neutron [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Successfully updated port: 695f64c6-49fa-4348-bb2b-910d7a8546e6 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 920.245180] env[69994]: DEBUG nova.compute.manager [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Received event network-changed-68208872-218b-45a2-b062-bedcf2b0803e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 920.245405] env[69994]: DEBUG nova.compute.manager [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Refreshing instance network info cache due to event network-changed-68208872-218b-45a2-b062-bedcf2b0803e. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 920.245589] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] Acquiring lock "refresh_cache-8001cb13-6a52-451b-b4b6-57b893975079" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.252524] env[69994]: DEBUG nova.scheduler.client.report [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 920.275095] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 7ea91d3b-1e43-45cd-9bff-e144c63177c8] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 920.373582] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "refresh_cache-8001cb13-6a52-451b-b4b6-57b893975079" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.374142] env[69994]: DEBUG nova.compute.manager [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Instance network_info: |[{"id": "68208872-218b-45a2-b062-bedcf2b0803e", "address": "fa:16:3e:1f:9d:a7", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68208872-21", "ovs_interfaceid": "68208872-218b-45a2-b062-bedcf2b0803e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 920.374455] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] 
Acquired lock "refresh_cache-8001cb13-6a52-451b-b4b6-57b893975079" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.374455] env[69994]: DEBUG nova.network.neutron [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Refreshing network info cache for port 68208872-218b-45a2-b062-bedcf2b0803e {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 920.375655] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:9d:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68208872-218b-45a2-b062-bedcf2b0803e', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 920.385688] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Creating folder: Project (c324e22a0046460b9ad3ad8578f7ef6f). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 920.386988] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81738e9e-2aa7-4df2-9c6a-80a1f12d2ba7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.398858] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Created folder: Project (c324e22a0046460b9ad3ad8578f7ef6f) in parent group-v587342. [ 920.399128] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Creating folder: Instances. Parent ref: group-v587523. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 920.399406] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02f5337c-e54c-4116-b92c-f6f7649abe57 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.409819] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Created folder: Instances in parent group-v587523. [ 920.409819] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 920.410046] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 920.410270] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ecda44d-af4b-4cab-8b64-e469ce8fe54a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.429521] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 920.429521] env[69994]: value = "task-2925692" [ 920.429521] env[69994]: _type = "Task" [ 920.429521] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.437873] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925692, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.499450] env[69994]: INFO nova.compute.manager [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Rescuing [ 920.499878] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquiring lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.500813] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquired lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.501105] env[69994]: DEBUG nova.network.neutron [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.613837] env[69994]: INFO nova.compute.manager [-] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Took 2.09 seconds to deallocate network for instance. [ 920.629291] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925689, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.723415] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.723547] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquired lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.723709] env[69994]: DEBUG nova.network.neutron [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.759546] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.010s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.762054] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.803s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.762309] env[69994]: DEBUG nova.objects.instance [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Lazy-loading 'resources' on Instance uuid 767ecd3d-631d-43b5-8ebf-28b6cb2077e9 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 920.782914] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 48f6ebca-d7fe-4086-80f4-0b89789dcddb] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 920.798207] env[69994]: INFO nova.scheduler.client.report [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Deleted allocations for instance b80a405e-a02e-4b18-a325-753146533d1b [ 920.943820] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925692, 'name': CreateVM_Task, 'duration_secs': 0.372823} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.944397] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 920.945210] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.945487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.945883] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 920.946264] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bd74e24-6bf5-43f7-81aa-7a39bfd9fa6c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.953718] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 920.953718] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ce5afc-53ed-5f42-0f42-3fff0b32bfaf" [ 920.953718] env[69994]: _type = "Task" [ 920.953718] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.963707] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ce5afc-53ed-5f42-0f42-3fff0b32bfaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.127372] env[69994]: DEBUG oslo_concurrency.lockutils [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.132060] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925689, 'name': ReconfigVM_Task, 'duration_secs': 0.629317} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.132540] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Reconfigured VM instance instance-00000041 to attach disk [datastore1] d28a6129-1bfe-40da-bc91-c68cf874aa36/d28a6129-1bfe-40da-bc91-c68cf874aa36.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.133301] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1487f450-3628-4fb9-9998-7af5124cce8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.141664] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 921.141664] env[69994]: value = "task-2925693" [ 921.141664] env[69994]: _type = "Task" [ 921.141664] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.160892] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925693, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.286872] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 316ab41e-d3c1-4cef-8d63-a138e21d0ea3] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 921.309715] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6975bce7-dd12-400b-885f-ae1580e957f3 tempest-ImagesOneServerNegativeTestJSON-630350183 tempest-ImagesOneServerNegativeTestJSON-630350183-project-member] Lock "b80a405e-a02e-4b18-a325-753146533d1b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.776s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.324920] env[69994]: DEBUG nova.network.neutron [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 921.404088] env[69994]: DEBUG nova.network.neutron [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Updated VIF entry in instance network info cache for port 68208872-218b-45a2-b062-bedcf2b0803e. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 921.404943] env[69994]: DEBUG nova.network.neutron [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Updating instance_info_cache with network_info: [{"id": "68208872-218b-45a2-b062-bedcf2b0803e", "address": "fa:16:3e:1f:9d:a7", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68208872-21", "ovs_interfaceid": "68208872-218b-45a2-b062-bedcf2b0803e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.468482] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ce5afc-53ed-5f42-0f42-3fff0b32bfaf, 'name': SearchDatastore_Task, 'duration_secs': 0.012354} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.469179] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.469950] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 921.469950] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.471356] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.471356] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 921.471356] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf77bc59-312e-4420-9a58-2f609c5fb25f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.482024] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 921.482130] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 921.483334] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7bcd01e-e257-4eeb-958e-b6e8e543b14f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.488887] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 921.488887] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52337208-3bd2-5ee3-f7cb-b6ac6f627e99" [ 921.488887] env[69994]: _type = "Task" [ 921.488887] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.500325] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52337208-3bd2-5ee3-f7cb-b6ac6f627e99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.516021] env[69994]: DEBUG nova.network.neutron [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Updating instance_info_cache with network_info: [{"id": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "address": "fa:16:3e:02:1d:b7", "network": {"id": "fdb14e71-8fc5-4c42-b3c1-40ef6285d670", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1372511022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "522a6d0ba5a54a3f9e6cdb0cb1d18b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e211ec9-4d", "ovs_interfaceid": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.588447] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07dced1-2356-453b-b8c9-5c00f4d4660f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.597874] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02837bc5-5f55-4c38-ac97-c3bb4e61bd9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.643015] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-9b2ad413-bc65-4143-b738-6f2a915baff9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.654104] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa81ea7c-7ca5-4c17-946d-2d1094e9b447 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.666222] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925693, 'name': Rename_Task, 'duration_secs': 0.35191} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.667088] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 921.667444] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32522705-5743-4e9d-94b6-2951ab1a579f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.678316] env[69994]: DEBUG nova.compute.provider_tree [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.684537] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 921.684537] env[69994]: value = "task-2925694" [ 921.684537] env[69994]: _type = "Task" [ 921.684537] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.694360] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925694, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.733579] env[69994]: DEBUG nova.network.neutron [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Updating instance_info_cache with network_info: [{"id": "695f64c6-49fa-4348-bb2b-910d7a8546e6", "address": "fa:16:3e:7d:c8:e8", "network": {"id": "e62ff410-9189-4ff0-98cd-b044bdf4b4b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-758708818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "891cfe67dd0044f3920402752215e361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap695f64c6-49", "ovs_interfaceid": "695f64c6-49fa-4348-bb2b-910d7a8546e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.791589] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 1232f601-3339-4fc2-92b2-aa550af90b01] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 921.909963] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] Releasing lock "refresh_cache-8001cb13-6a52-451b-b4b6-57b893975079" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.910920] env[69994]: DEBUG nova.compute.manager [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Received event network-vif-deleted-6634b7a0-01a3-49e4-a7ac-6f8572d86925 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 921.911146] env[69994]: DEBUG nova.compute.manager [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Received event network-changed-0f0159af-9c04-46fe-8fac-ebd620726fd7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 921.911315] env[69994]: DEBUG nova.compute.manager [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Refreshing instance network info cache due to event network-changed-0f0159af-9c04-46fe-8fac-ebd620726fd7. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 921.911576] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] Acquiring lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.911672] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] Acquired lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.911855] env[69994]: DEBUG nova.network.neutron [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Refreshing network info cache for port 0f0159af-9c04-46fe-8fac-ebd620726fd7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 922.005390] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52337208-3bd2-5ee3-f7cb-b6ac6f627e99, 'name': SearchDatastore_Task, 'duration_secs': 0.01036} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.007096] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e02f1a45-61aa-4baa-bfe5-dcb906022b8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.013803] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 922.013803] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a277e1-9568-5e2d-013d-6308cb65387a" [ 922.013803] env[69994]: _type = "Task" [ 922.013803] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.020828] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Releasing lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.029506] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a277e1-9568-5e2d-013d-6308cb65387a, 'name': SearchDatastore_Task} progress is 0%. 
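Illustrative aside, not part of the captured log: the "Acquiring lock" / "Acquired lock" / "Releasing lock" triplets (e.g. refresh_cache-<uuid>) and the 'acquired by "..." :: waited Ns' / '"released" by "..." :: held Ns' entries above are produced by oslo.concurrency's lockutils. A minimal sketch of the two call patterns behind them; the lock names and guarded bodies are placeholders, not Nova source:

    from oslo_concurrency import lockutils

    # Context-manager form: logs the Acquiring/Acquired/Releasing DEBUG lines
    # seen above for locks such as refresh_cache-<instance uuid>.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # placeholder: rebuild the instance network info cache here

    # Decorator form: serializes callers the way the resource tracker does on
    # "compute_resources"; wait and hold times are logged on acquire/release.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # placeholder: adjust tracked resource usage here
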
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.182276] env[69994]: DEBUG nova.scheduler.client.report [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 922.198461] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925694, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.239162] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Releasing lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.239317] env[69994]: DEBUG nova.compute.manager [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Instance network_info: |[{"id": "695f64c6-49fa-4348-bb2b-910d7a8546e6", "address": "fa:16:3e:7d:c8:e8", "network": {"id": "e62ff410-9189-4ff0-98cd-b044bdf4b4b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-758708818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "891cfe67dd0044f3920402752215e361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap695f64c6-49", "ovs_interfaceid": "695f64c6-49fa-4348-bb2b-910d7a8546e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 922.240507] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:c8:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'00a15667-7ca5-4dc9-be92-164750d87988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '695f64c6-49fa-4348-bb2b-910d7a8546e6', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 922.249652] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 922.250008] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 922.250198] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8a39045-12ba-48f1-9bed-96d54baa8871 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.271548] env[69994]: DEBUG nova.compute.manager [req-09a13923-8449-45d4-a83b-269ce4ba1034 req-a6b72e0a-a93c-4862-9fca-f0e527fb93ea service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Received event network-changed-695f64c6-49fa-4348-bb2b-910d7a8546e6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 922.272103] env[69994]: DEBUG nova.compute.manager [req-09a13923-8449-45d4-a83b-269ce4ba1034 req-a6b72e0a-a93c-4862-9fca-f0e527fb93ea service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Refreshing instance network info cache due to event network-changed-695f64c6-49fa-4348-bb2b-910d7a8546e6. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 922.272268] env[69994]: DEBUG oslo_concurrency.lockutils [req-09a13923-8449-45d4-a83b-269ce4ba1034 req-a6b72e0a-a93c-4862-9fca-f0e527fb93ea service nova] Acquiring lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.272413] env[69994]: DEBUG oslo_concurrency.lockutils [req-09a13923-8449-45d4-a83b-269ce4ba1034 req-a6b72e0a-a93c-4862-9fca-f0e527fb93ea service nova] Acquired lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.273213] env[69994]: DEBUG nova.network.neutron [req-09a13923-8449-45d4-a83b-269ce4ba1034 req-a6b72e0a-a93c-4862-9fca-f0e527fb93ea service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Refreshing network info cache for port 695f64c6-49fa-4348-bb2b-910d7a8546e6 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 922.280164] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 922.280164] env[69994]: value = "task-2925695" [ 922.280164] env[69994]: _type = "Task" [ 922.280164] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.293940] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925695, 'name': CreateVM_Task} progress is 0%. 
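Illustrative aside, not from the log itself: the "Inventory has not changed for provider ..." entries above carry the provider inventory as one dict per resource class. Placement treats the usable capacity of each class as (total - reserved) * allocation_ratio, so a quick check against the figures reported above (dict abridged to the relevant keys):

    # Abridged from the inventory data logged above for provider
    # 2173cd1f-90eb-4aab-b51d-83c140d1a7be.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
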
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.296063] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 317e3366-4aec-4c80-bcf9-df84bc5e9939] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 922.321872] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "83cef95b-99a5-4e6e-8258-79b380b595b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.322297] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "83cef95b-99a5-4e6e-8258-79b380b595b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.527158] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a277e1-9568-5e2d-013d-6308cb65387a, 'name': SearchDatastore_Task, 'duration_secs': 0.025999} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.527475] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.527745] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 8001cb13-6a52-451b-b4b6-57b893975079/8001cb13-6a52-451b-b4b6-57b893975079.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 922.528317] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88db44d8-a8ff-42ff-8e00-447aa5d216cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.535512] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 922.535512] env[69994]: value = "task-2925696" [ 922.535512] env[69994]: _type = "Task" [ 922.535512] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.548444] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2925696, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.694786] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.933s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.697116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.941s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.698267] env[69994]: DEBUG nova.objects.instance [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lazy-loading 'resources' on Instance uuid 627f89ad-0381-4de9-a429-c74e26975ce9 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.706527] env[69994]: DEBUG oslo_vmware.api [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925694, 'name': PowerOnVM_Task, 'duration_secs': 0.946293} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.706677] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 922.707813] env[69994]: INFO nova.compute.manager [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Took 8.96 seconds to spawn the instance on the hypervisor. 
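Illustrative aside, not part of the captured log: the Rename_Task, PowerOnVM_Task and CreateVM_Task entries above all follow oslo.vmware's call-then-poll pattern — invoke a *_Task method, then wait while the library logs "Task: {...} progress is N%" until it reports "completed successfully" with a duration_secs. A minimal sketch of that pattern; the vCenter address, credentials and vm_ref are placeholders, and the exact session arguments should be checked against the oslo.vmware release in use:

    from oslo_vmware import api

    # Placeholder endpoint and credentials; positional arguments are host,
    # username, password, api retry count, task poll interval (seconds).
    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret', 10, 0.5)

    def power_on(vm_ref):
        # Invoking VirtualMachine.PowerOnVM_Task, as logged above ...
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # ... then polling until the task finishes; wait_for_task() emits the
        # progress lines and returns the completed task info.
        return session.wait_for_task(task)
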
[ 922.707813] env[69994]: DEBUG nova.compute.manager [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.708054] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df46c36-3547-4b87-8c8d-01f2dd122d89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.740686] env[69994]: INFO nova.scheduler.client.report [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Deleted allocations for instance 767ecd3d-631d-43b5-8ebf-28b6cb2077e9 [ 922.794607] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925695, 'name': CreateVM_Task, 'duration_secs': 0.32654} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.794924] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 922.795554] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.795733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.796074] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 922.796348] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2609fc2-5190-41b1-9ee0-d73b86c40e44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.799159] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 1d5b8fb7-eeb0-49da-acdf-53b7741e863e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 922.804024] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 922.804024] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52604d36-73ea-4835-577b-ad6561e595de" [ 922.804024] env[69994]: _type = "Task" [ 922.804024] 
env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.649444] env[69994]: DEBUG nova.network.neutron [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updated VIF entry in instance network info cache for port 0f0159af-9c04-46fe-8fac-ebd620726fd7. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 923.649793] env[69994]: DEBUG nova.network.neutron [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance_info_cache with network_info: [{"id": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "address": "fa:16:3e:fd:73:3a", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f0159af-9c", "ovs_interfaceid": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.653330] env[69994]: DEBUG nova.compute.manager [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 923.668825] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: dc548f2f-e6d6-4273-8c24-b4f52842e0d2] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 923.673920] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8fa86e21-3d64-4c23-8eaa-9deeb1518582 tempest-ServersNegativeTestMultiTenantJSON-925898873 tempest-ServersNegativeTestMultiTenantJSON-925898873-project-member] Lock "767ecd3d-631d-43b5-8ebf-28b6cb2077e9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.149s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.686105] env[69994]: INFO nova.compute.manager [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Took 39.27 seconds to build instance. 
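Illustrative aside, a hypothetical helper rather than anything shipped with Nova: lines such as 'Lock "compute_resources" acquired by "..." :: waited 8.941s', '"released" by "..." :: held 1.933s' and 'Took 39.27 seconds to build instance' above are the easiest way to spot lock contention and slow builds in a run like this. A small parser over exactly that log format:

    import re

    LOCK_RE = re.compile(
        r'Lock "(?P<name>[^"]+)" '
        r'(?:acquired by "[^"]+" :: waited (?P<waited>[\d.]+)s'
        r'|"released" by "[^"]+" :: held (?P<held>[\d.]+)s)')
    BUILD_RE = re.compile(r'Took (?P<secs>[\d.]+) seconds to build instance')

    def summarize(log_lines):
        """Collect lock wait/hold times and instance build times."""
        waits, holds, builds = {}, {}, []
        for line in log_lines:
            m = LOCK_RE.search(line)
            if m:
                if m.group('waited') is not None:
                    waits.setdefault(m.group('name'), []).append(float(m.group('waited')))
                else:
                    holds.setdefault(m.group('name'), []).append(float(m.group('held')))
            b = BUILD_RE.search(line)
            if b:
                builds.append(float(b.group('secs')))
        return waits, holds, builds
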
[ 923.687676] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52604d36-73ea-4835-577b-ad6561e595de, 'name': SearchDatastore_Task, 'duration_secs': 0.020356} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.689387] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.689733] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 923.690490] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.690490] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.691315] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 923.696968] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-012aad60-3836-4842-a91b-0580d897b918 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.699609] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2925696, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.773754} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.700992] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 8001cb13-6a52-451b-b4b6-57b893975079/8001cb13-6a52-451b-b4b6-57b893975079.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 923.700992] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 923.702403] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dfd95f49-462a-452a-acb3-9dcff381a5ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.711166] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 923.711369] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 923.715935] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe6d6c52-6290-4523-8de9-1854fa35b5a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.718823] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 923.718823] env[69994]: value = "task-2925697" [ 923.718823] env[69994]: _type = "Task" [ 923.718823] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.725387] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 923.725387] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5203b0a0-b223-a53b-50a2-5767fe4d730d" [ 923.725387] env[69994]: _type = "Task" [ 923.725387] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.733069] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2925697, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.741810] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5203b0a0-b223-a53b-50a2-5767fe4d730d, 'name': SearchDatastore_Task, 'duration_secs': 0.01237} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.742759] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-554a0763-813b-479e-ad99-96de85e2f555 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.748685] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 923.748685] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526a8da3-198b-0fce-600a-fce181e15a4f" [ 923.748685] env[69994]: _type = "Task" [ 923.748685] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.761367] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526a8da3-198b-0fce-600a-fce181e15a4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.033727] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5eff62-ec43-4261-a3ee-22538595e680 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.048127] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb81155-c94a-4f0d-a855-532d4ef88d64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.087533] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f7a8e6d-b792-4702-8d2a-acc6281e9609 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.097577] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa78e152-1a0c-49a1-a6c5-b9ba601d0ec8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.114511] env[69994]: DEBUG nova.compute.provider_tree [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.178516] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8bd6e6b-9363-4423-91be-7c5807e3c5e9 req-390def2a-bf69-4bc4-ae6f-7263d867b7da service nova] Releasing lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.189696] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4041b870-9745-4e77-b04a-700f34914830 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "d28a6129-1bfe-40da-bc91-c68cf874aa36" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.786s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.197939] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.211642] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.211642] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Cleaning up deleted instances with incomplete migration {{(pid=69994) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 924.216858] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 924.216858] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ce50cbb-aa4c-481b-bac6-5570d84425ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.225564] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 924.225564] env[69994]: value = "task-2925698" [ 924.225564] env[69994]: _type = "Task" [ 924.225564] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.236459] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2925697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.307063} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.237202] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 924.238292] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dadcbb05-c88e-4f77-ad2c-cbf3618ff207 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.247299] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925698, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.271607] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 8001cb13-6a52-451b-b4b6-57b893975079/8001cb13-6a52-451b-b4b6-57b893975079.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 924.272201] env[69994]: DEBUG nova.network.neutron [req-09a13923-8449-45d4-a83b-269ce4ba1034 req-a6b72e0a-a93c-4862-9fca-f0e527fb93ea service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Updated VIF entry in instance network info cache for port 695f64c6-49fa-4348-bb2b-910d7a8546e6. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 924.272759] env[69994]: DEBUG nova.network.neutron [req-09a13923-8449-45d4-a83b-269ce4ba1034 req-a6b72e0a-a93c-4862-9fca-f0e527fb93ea service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Updating instance_info_cache with network_info: [{"id": "695f64c6-49fa-4348-bb2b-910d7a8546e6", "address": "fa:16:3e:7d:c8:e8", "network": {"id": "e62ff410-9189-4ff0-98cd-b044bdf4b4b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-758708818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "891cfe67dd0044f3920402752215e361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap695f64c6-49", "ovs_interfaceid": "695f64c6-49fa-4348-bb2b-910d7a8546e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.277216] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8045f3f5-3b4f-4ddf-b4d3-bc4af5476fd9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.296623] env[69994]: DEBUG oslo_concurrency.lockutils [req-09a13923-8449-45d4-a83b-269ce4ba1034 req-a6b72e0a-a93c-4862-9fca-f0e527fb93ea service nova] Releasing lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.308320] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526a8da3-198b-0fce-600a-fce181e15a4f, 'name': SearchDatastore_Task, 'duration_secs': 0.015091} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.308652] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 924.308652] env[69994]: value = "task-2925699" [ 924.308652] env[69994]: _type = "Task" [ 924.308652] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.308880] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.310564] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 93087ec4-1d88-47cc-b1d2-0f1697556eae/93087ec4-1d88-47cc-b1d2-0f1697556eae.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 924.310913] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a656d222-b1e5-4dc4-b2f5-50e1205edc63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.322598] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2925699, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.324598] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 924.324598] env[69994]: value = "task-2925700" [ 924.324598] env[69994]: _type = "Task" [ 924.324598] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.335498] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925700, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.448711] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "d28a6129-1bfe-40da-bc91-c68cf874aa36" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.448873] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "d28a6129-1bfe-40da-bc91-c68cf874aa36" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.449116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "d28a6129-1bfe-40da-bc91-c68cf874aa36-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.449346] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "d28a6129-1bfe-40da-bc91-c68cf874aa36-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.449497] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "d28a6129-1bfe-40da-bc91-c68cf874aa36-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.454089] env[69994]: INFO nova.compute.manager [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Terminating instance [ 924.618609] env[69994]: DEBUG nova.scheduler.client.report [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 924.716446] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69994) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.740250] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925698, 'name': PowerOffVM_Task, 'duration_secs': 0.296406} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.740541] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 924.744044] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d26a01-89b5-4124-84f7-0793ad6c8359 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.764886] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c919cf-6bd9-45e3-93d0-f4212ab18b1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.809944] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 924.809944] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d291e79-986b-49c9-a608-50101ec1c46e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.823032] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2925699, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.823717] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 924.823717] env[69994]: value = "task-2925701" [ 924.823717] env[69994]: _type = "Task" [ 924.823717] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.837615] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925700, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.842968] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 924.843264] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 924.843563] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.843766] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.843988] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 924.844374] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f193fa47-a87a-4301-a274-c818b9073018 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.856536] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 924.856536] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 924.857607] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-752d85b0-bd33-4ef6-b8d4-062259031860 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.866896] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 924.866896] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f25ef3-0286-8ea4-6ebd-08272e4fe7ee" [ 924.866896] env[69994]: _type = "Task" [ 924.866896] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.877291] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f25ef3-0286-8ea4-6ebd-08272e4fe7ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.962029] env[69994]: DEBUG nova.compute.manager [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 924.962029] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 924.962029] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a31a5f1-1843-435b-af71-5ed493d639fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.972353] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 924.972650] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1eb218c-ac84-40ca-811b-4647be154c6c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.978192] env[69994]: DEBUG oslo_vmware.api [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 924.978192] env[69994]: value = "task-2925702" [ 924.978192] env[69994]: _type = "Task" [ 924.978192] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.988640] env[69994]: DEBUG oslo_vmware.api [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925702, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.125069] env[69994]: DEBUG oslo_concurrency.lockutils [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.428s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.127448] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.731s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.127783] env[69994]: DEBUG nova.objects.instance [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lazy-loading 'resources' on Instance uuid 309e5014-a43f-4346-9c11-036eb36c8c1f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.164075] env[69994]: INFO nova.scheduler.client.report [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleted allocations for instance 627f89ad-0381-4de9-a429-c74e26975ce9 [ 925.321779] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2925699, 'name': ReconfigVM_Task, 'duration_secs': 0.673924} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.322080] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 8001cb13-6a52-451b-b4b6-57b893975079/8001cb13-6a52-451b-b4b6-57b893975079.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 925.322720] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28bf604c-4a31-4732-be92-06d399c1ccff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.332489] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 925.332489] env[69994]: value = "task-2925703" [ 925.332489] env[69994]: _type = "Task" [ 925.332489] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.340994] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925700, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630681} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.342624] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 93087ec4-1d88-47cc-b1d2-0f1697556eae/93087ec4-1d88-47cc-b1d2-0f1697556eae.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 925.342860] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 925.343135] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c80b7272-8c64-42ae-91c4-6cb27c15ff02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.349015] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2925703, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.353874] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 925.353874] env[69994]: value = "task-2925704" [ 925.353874] env[69994]: _type = "Task" [ 925.353874] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.363696] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925704, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.382921] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f25ef3-0286-8ea4-6ebd-08272e4fe7ee, 'name': SearchDatastore_Task, 'duration_secs': 0.027073} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.385262] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f1335a2-7509-4b22-b742-1b68780423a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.394088] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 925.394088] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f1607a-232a-8607-05a3-14fa4285a257" [ 925.394088] env[69994]: _type = "Task" [ 925.394088] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.406553] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f1607a-232a-8607-05a3-14fa4285a257, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.490464] env[69994]: DEBUG oslo_vmware.api [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925702, 'name': PowerOffVM_Task, 'duration_secs': 0.442381} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.490911] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 925.491180] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 925.491866] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7bc66566-dc46-41cf-be69-1e89d05ae913 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.553630] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 925.553855] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 925.554048] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde 
tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleting the datastore file [datastore1] d28a6129-1bfe-40da-bc91-c68cf874aa36 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 925.554320] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d4df9bd-67fd-49fc-b00d-7445678e8236 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.564971] env[69994]: DEBUG oslo_vmware.api [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 925.564971] env[69994]: value = "task-2925706" [ 925.564971] env[69994]: _type = "Task" [ 925.564971] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.573565] env[69994]: DEBUG oslo_vmware.api [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925706, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.672682] env[69994]: DEBUG oslo_concurrency.lockutils [None req-52185407-e0c1-422c-9e20-b650b89d68d4 tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "627f89ad-0381-4de9-a429-c74e26975ce9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.489s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.856307] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2925703, 'name': Rename_Task, 'duration_secs': 0.160064} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.856307] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 925.857951] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6d17aea-e0d3-40fc-bbef-794c2f93135b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.870884] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925704, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076852} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.872201] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 925.872525] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 925.872525] env[69994]: value = "task-2925707" [ 925.872525] env[69994]: _type = "Task" [ 925.872525] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.873255] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85010048-b990-428c-a86a-42e7ad1396d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.890500] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2925707, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.908776] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 93087ec4-1d88-47cc-b1d2-0f1697556eae/93087ec4-1d88-47cc-b1d2-0f1697556eae.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 925.912167] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1794d2fb-700e-4235-aaa7-1c5bf925356c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.945019] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f1607a-232a-8607-05a3-14fa4285a257, 'name': SearchDatastore_Task, 'duration_secs': 0.012659} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.945019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.945019] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 43119e21-5226-482c-b640-33e73051a563/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk. {{(pid=69994) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 925.945393] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 925.945393] env[69994]: value = "task-2925708" [ 925.945393] env[69994]: _type = "Task" [ 925.945393] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.945560] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b1cd4bd-1d45-4f4b-9202-269dd7573d60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.961360] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925708, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.961360] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 925.961360] env[69994]: value = "task-2925709" [ 925.961360] env[69994]: _type = "Task" [ 925.961360] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.971698] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925709, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.048347] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996db111-eca7-4641-afae-370a58c3a975 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.059264] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2d8e99-e44e-4407-8784-9d869979590e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.098409] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9acf39-a863-4e79-bff3-3e1531fbf6e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.104463] env[69994]: DEBUG oslo_vmware.api [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925706, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.21466} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.105241] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 926.105410] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 926.105626] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 926.106722] env[69994]: INFO nova.compute.manager [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Took 1.15 seconds to destroy the instance on the hypervisor. [ 926.106722] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 926.109123] env[69994]: DEBUG nova.compute.manager [-] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 926.109350] env[69994]: DEBUG nova.network.neutron [-] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 926.112449] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237c9a94-f100-431f-a345-b14faba17b7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.133976] env[69994]: DEBUG nova.compute.provider_tree [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 926.390030] env[69994]: DEBUG oslo_vmware.api [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2925707, 'name': PowerOnVM_Task, 'duration_secs': 0.510894} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.390368] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 926.390582] env[69994]: INFO nova.compute.manager [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Took 10.10 seconds to spawn the instance on the hypervisor. [ 926.390768] env[69994]: DEBUG nova.compute.manager [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 926.391697] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a6fc1f-7fc4-48c1-942e-64aa7e8cc92b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.462653] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925708, 'name': ReconfigVM_Task, 'duration_secs': 0.336005} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.469833] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 93087ec4-1d88-47cc-b1d2-0f1697556eae/93087ec4-1d88-47cc-b1d2-0f1697556eae.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.471793] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19b582ee-fd1c-45db-8b24-9f35758bc12f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.481681] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925709, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497081} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.481681] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 43119e21-5226-482c-b640-33e73051a563/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk. [ 926.481681] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 926.481681] env[69994]: value = "task-2925710" [ 926.481681] env[69994]: _type = "Task" [ 926.481681] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.482396] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11fb67f3-f195-4fb9-ae27-8174ec1a61be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.517713] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 43119e21-5226-482c-b640-33e73051a563/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 926.519526] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925710, 'name': Rename_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.519630] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79d01d74-15d8-47dd-bfd8-30af5479ba12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.535177] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "298a4d59-733f-4cda-a9c2-80dc21be91ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.535177] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "298a4d59-733f-4cda-a9c2-80dc21be91ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.535177] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "298a4d59-733f-4cda-a9c2-80dc21be91ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.535177] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "298a4d59-733f-4cda-a9c2-80dc21be91ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.535177] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "298a4d59-733f-4cda-a9c2-80dc21be91ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.536887] env[69994]: INFO nova.compute.manager [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Terminating instance [ 926.543821] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 926.543821] env[69994]: value = "task-2925711" [ 926.543821] env[69994]: _type = "Task" [ 926.543821] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.553187] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925711, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.673376] env[69994]: ERROR nova.scheduler.client.report [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [req-7c580a33-21c2-4402-b55f-91cbcd8f9e36] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7c580a33-21c2-4402-b55f-91cbcd8f9e36"}]} [ 926.701646] env[69994]: DEBUG nova.scheduler.client.report [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 926.718474] env[69994]: DEBUG nova.scheduler.client.report [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 926.718705] env[69994]: DEBUG nova.compute.provider_tree [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 926.730109] env[69994]: DEBUG nova.scheduler.client.report [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Refreshing aggregate associations for resource provider 
2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 926.755846] env[69994]: DEBUG nova.scheduler.client.report [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 926.813139] env[69994]: DEBUG nova.compute.manager [req-e0e1cfb1-0eb9-4b52-b25c-e4f20e19a29c req-f169cd43-6bf1-4537-9104-e418c2d02ba8 service nova] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Received event network-vif-deleted-d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 926.813893] env[69994]: INFO nova.compute.manager [req-e0e1cfb1-0eb9-4b52-b25c-e4f20e19a29c req-f169cd43-6bf1-4537-9104-e418c2d02ba8 service nova] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Neutron deleted interface d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6; detaching it from the instance and deleting it from the info cache [ 926.813893] env[69994]: DEBUG nova.network.neutron [req-e0e1cfb1-0eb9-4b52-b25c-e4f20e19a29c req-f169cd43-6bf1-4537-9104-e418c2d02ba8 service nova] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.920266] env[69994]: INFO nova.compute.manager [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Took 37.61 seconds to build instance. [ 926.999267] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925710, 'name': Rename_Task, 'duration_secs': 0.186962} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.999643] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 926.999798] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74bc7b27-7251-40d2-a5e1-d2016e7651c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.006854] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 927.006854] env[69994]: value = "task-2925712" [ 927.006854] env[69994]: _type = "Task" [ 927.006854] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.019121] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925712, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.045710] env[69994]: DEBUG nova.compute.manager [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 927.046076] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 927.050063] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af77b0e4-7f41-4c61-93f2-aa0a16a718b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.061920] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925711, 'name': ReconfigVM_Task, 'duration_secs': 0.330397} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.064374] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 43119e21-5226-482c-b640-33e73051a563/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 927.064633] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.065499] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8e1396-b86d-40e2-bfd3-6227322547c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.068125] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20d18e4c-7880-4120-b11a-c9fe79e73209 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.101152] env[69994]: DEBUG oslo_vmware.api [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 927.101152] env[69994]: value = 
"task-2925713" [ 927.101152] env[69994]: _type = "Task" [ 927.101152] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.101772] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab8ee9ab-0d67-488d-bb75-fab5300d731e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.120511] env[69994]: DEBUG oslo_vmware.api [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925713, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.124405] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 927.124405] env[69994]: value = "task-2925714" [ 927.124405] env[69994]: _type = "Task" [ 927.124405] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.134038] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925714, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.155637] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e778e202-a377-4ea1-9d51-f3859b5b3bf8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.173435] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c46952-f5a5-465f-b8d5-4400a95e1b02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.206255] env[69994]: DEBUG nova.network.neutron [-] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.207917] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c96e48f-f60a-4376-a7c4-52af00118c58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.216709] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ad8c99-25c4-4243-ae1e-c2fc9bae02e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.233026] env[69994]: DEBUG nova.compute.provider_tree [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 927.319554] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-607cebf2-0287-4ef3-b7bc-1cfb1b9f85e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.329250] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d31226-f04e-4670-8122-703844dd26c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.365555] env[69994]: DEBUG nova.compute.manager [req-e0e1cfb1-0eb9-4b52-b25c-e4f20e19a29c req-f169cd43-6bf1-4537-9104-e418c2d02ba8 service nova] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Detach interface failed, port_id=d6a9f86b-b4c4-4b0d-8757-5b1f776f5ce6, reason: Instance d28a6129-1bfe-40da-bc91-c68cf874aa36 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 927.425470] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11987af4-b4eb-49cc-b66e-3785280e7173 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "8001cb13-6a52-451b-b4b6-57b893975079" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.126s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.519890] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925712, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.623387] env[69994]: DEBUG oslo_vmware.api [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925713, 'name': PowerOffVM_Task, 'duration_secs': 0.331091} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.624067] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 927.626684] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 927.626684] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f030f58-8d45-4ba4-9002-d8a5e9bbc877 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.635386] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925714, 'name': ReconfigVM_Task, 'duration_secs': 0.187553} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.635905] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.636312] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a15d795d-db4e-4950-877e-08af50d5c193 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.644071] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 927.644071] env[69994]: value = "task-2925716" [ 927.644071] env[69994]: _type = "Task" [ 927.644071] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.713839] env[69994]: INFO nova.compute.manager [-] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Took 1.60 seconds to deallocate network for instance. 
[ 927.724363] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 927.724575] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 927.724761] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleting the datastore file [datastore2] 298a4d59-733f-4cda-a9c2-80dc21be91ca {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 927.725313] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2f795c6-db2e-4306-9087-1ce794bbbde8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.737457] env[69994]: DEBUG oslo_vmware.api [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for the task: (returnval){ [ 927.737457] env[69994]: value = "task-2925717" [ 927.737457] env[69994]: _type = "Task" [ 927.737457] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.755017] env[69994]: DEBUG oslo_vmware.api [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925717, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.782151] env[69994]: DEBUG nova.scheduler.client.report [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 92 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 927.783417] env[69994]: DEBUG nova.compute.provider_tree [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 92 to 93 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 927.783770] env[69994]: DEBUG nova.compute.provider_tree [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 928.024709] env[69994]: DEBUG oslo_vmware.api [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925712, 'name': PowerOnVM_Task, 'duration_secs': 0.840185} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.025275] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.025738] env[69994]: INFO nova.compute.manager [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Took 9.19 seconds to spawn the instance on the hypervisor. 
[ 928.025963] env[69994]: DEBUG nova.compute.manager [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.027916] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999700c2-b0bd-4cdd-a7ac-8caaebb36070 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.154270] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925716, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.226883] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.247599] env[69994]: DEBUG oslo_vmware.api [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Task: {'id': task-2925717, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150497} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.248243] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 928.248385] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 928.248682] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 928.248791] env[69994]: INFO nova.compute.manager [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Took 1.20 seconds to destroy the instance on the hypervisor. [ 928.249042] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 928.249262] env[69994]: DEBUG nova.compute.manager [-] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 928.249356] env[69994]: DEBUG nova.network.neutron [-] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 928.291068] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.164s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.293537] env[69994]: DEBUG oslo_concurrency.lockutils [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.166s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.293779] env[69994]: DEBUG nova.objects.instance [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lazy-loading 'resources' on Instance uuid 63d6a59a-d58c-4179-ad39-eb9863e6f84c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 928.322746] env[69994]: INFO nova.scheduler.client.report [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleted allocations for instance 309e5014-a43f-4346-9c11-036eb36c8c1f [ 928.554395] env[69994]: INFO nova.compute.manager [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Took 22.99 seconds to build instance. [ 928.558923] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquiring lock "15595947-b944-4c82-90ae-883ed951c909" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.559071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Lock "15595947-b944-4c82-90ae-883ed951c909" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.661504] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925716, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.837091] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e57b3e9c-f16d-4a80-89c0-a72aad5e28e5 tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "309e5014-a43f-4346-9c11-036eb36c8c1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.881s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.899943] env[69994]: DEBUG nova.compute.manager [req-38afbf1f-a995-41e4-939b-a0f31b2540f1 req-4a315c7a-d246-495d-bcb9-fa53565f78e3 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Received event network-changed-68208872-218b-45a2-b062-bedcf2b0803e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 928.902139] env[69994]: DEBUG nova.compute.manager [req-38afbf1f-a995-41e4-939b-a0f31b2540f1 req-4a315c7a-d246-495d-bcb9-fa53565f78e3 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Refreshing instance network info cache due to event network-changed-68208872-218b-45a2-b062-bedcf2b0803e. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 928.902139] env[69994]: DEBUG oslo_concurrency.lockutils [req-38afbf1f-a995-41e4-939b-a0f31b2540f1 req-4a315c7a-d246-495d-bcb9-fa53565f78e3 service nova] Acquiring lock "refresh_cache-8001cb13-6a52-451b-b4b6-57b893975079" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.902139] env[69994]: DEBUG oslo_concurrency.lockutils [req-38afbf1f-a995-41e4-939b-a0f31b2540f1 req-4a315c7a-d246-495d-bcb9-fa53565f78e3 service nova] Acquired lock "refresh_cache-8001cb13-6a52-451b-b4b6-57b893975079" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.902305] env[69994]: DEBUG nova.network.neutron [req-38afbf1f-a995-41e4-939b-a0f31b2540f1 req-4a315c7a-d246-495d-bcb9-fa53565f78e3 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Refreshing network info cache for port 68208872-218b-45a2-b062-bedcf2b0803e {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 929.065553] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb72cf8f-645c-43fa-9a7d-fee9fe7fa113 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "93087ec4-1d88-47cc-b1d2-0f1697556eae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.514s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.066888] env[69994]: DEBUG nova.compute.manager [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 929.166286] env[69994]: DEBUG oslo_vmware.api [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925716, 'name': PowerOnVM_Task, 'duration_secs': 1.027904} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.166535] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 929.171175] env[69994]: DEBUG nova.compute.manager [None req-3dadfd83-2c67-4c53-8790-cd158ac81d54 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 929.171175] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b778e0f-20e6-4eae-a9dd-deeef2cc5be0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.219030] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b1162c-88d4-47d2-8ea3-b0608796555f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.230435] env[69994]: DEBUG nova.network.neutron [-] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.233539] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c404f9-5b0f-45e8-a284-367b607afcd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.271145] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09ed251-1198-4796-afe7-9031b7f93a0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.280192] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c136fa-f9de-4cdb-8fca-6ddb7090d6c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.295637] env[69994]: DEBUG nova.compute.provider_tree [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 929.596404] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.740314] env[69994]: INFO nova.compute.manager [-] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Took 1.49 seconds to deallocate network for instance. [ 929.759414] env[69994]: DEBUG nova.network.neutron [req-38afbf1f-a995-41e4-939b-a0f31b2540f1 req-4a315c7a-d246-495d-bcb9-fa53565f78e3 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Updated VIF entry in instance network info cache for port 68208872-218b-45a2-b062-bedcf2b0803e. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 929.759414] env[69994]: DEBUG nova.network.neutron [req-38afbf1f-a995-41e4-939b-a0f31b2540f1 req-4a315c7a-d246-495d-bcb9-fa53565f78e3 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Updating instance_info_cache with network_info: [{"id": "68208872-218b-45a2-b062-bedcf2b0803e", "address": "fa:16:3e:1f:9d:a7", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68208872-21", "ovs_interfaceid": "68208872-218b-45a2-b062-bedcf2b0803e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.826020] env[69994]: ERROR nova.scheduler.client.report [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] [req-66243620-6162-4ad5-abe4-346a10cba7d2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-66243620-6162-4ad5-abe4-346a10cba7d2"}]} [ 929.839152] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Acquiring lock "660277f8-a7ff-43a9-8068-15e3db5a1069" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.839684] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Lock "660277f8-a7ff-43a9-8068-15e3db5a1069" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.843073] env[69994]: DEBUG nova.scheduler.client.report [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 929.863994] env[69994]: DEBUG nova.scheduler.client.report [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 929.864485] env[69994]: DEBUG nova.compute.provider_tree [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 929.879696] env[69994]: DEBUG nova.scheduler.client.report [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 929.904212] env[69994]: 
DEBUG nova.scheduler.client.report [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 930.226385] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348fde41-b5f9-4fbc-9c94-7fb3018f5389 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.238661] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a2779e-83b1-4b5b-90af-85559a316c5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.277037] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.277565] env[69994]: DEBUG oslo_concurrency.lockutils [req-38afbf1f-a995-41e4-939b-a0f31b2540f1 req-4a315c7a-d246-495d-bcb9-fa53565f78e3 service nova] Releasing lock "refresh_cache-8001cb13-6a52-451b-b4b6-57b893975079" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.277799] env[69994]: DEBUG nova.compute.manager [req-38afbf1f-a995-41e4-939b-a0f31b2540f1 req-4a315c7a-d246-495d-bcb9-fa53565f78e3 service nova] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Received event network-vif-deleted-a81f7c8f-8499-4f74-8860-fca65590ea7b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 930.278021] env[69994]: INFO nova.compute.manager [req-38afbf1f-a995-41e4-939b-a0f31b2540f1 req-4a315c7a-d246-495d-bcb9-fa53565f78e3 service nova] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Neutron deleted interface a81f7c8f-8499-4f74-8860-fca65590ea7b; detaching it from the instance and deleting it from the info cache [ 930.278192] env[69994]: DEBUG nova.network.neutron [req-38afbf1f-a995-41e4-939b-a0f31b2540f1 req-4a315c7a-d246-495d-bcb9-fa53565f78e3 service nova] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.280406] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b40ea1c-5712-4fe0-b4f7-8def74366687 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.290449] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c561b0ca-cdad-408a-a401-29e0923879f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.307338] env[69994]: DEBUG nova.compute.provider_tree [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] 
Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 930.347462] env[69994]: DEBUG nova.compute.manager [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 930.786673] env[69994]: DEBUG nova.compute.manager [req-aa3d5d39-845a-428e-86a8-8a9c80f3f486 req-d7ae93db-7bbc-4a9b-b784-a3e0bbb2a2de service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Received event network-changed-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 930.786837] env[69994]: DEBUG nova.compute.manager [req-aa3d5d39-845a-428e-86a8-8a9c80f3f486 req-d7ae93db-7bbc-4a9b-b784-a3e0bbb2a2de service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Refreshing instance network info cache due to event network-changed-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 930.787230] env[69994]: DEBUG oslo_concurrency.lockutils [req-aa3d5d39-845a-428e-86a8-8a9c80f3f486 req-d7ae93db-7bbc-4a9b-b784-a3e0bbb2a2de service nova] Acquiring lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.787230] env[69994]: DEBUG oslo_concurrency.lockutils [req-aa3d5d39-845a-428e-86a8-8a9c80f3f486 req-d7ae93db-7bbc-4a9b-b784-a3e0bbb2a2de service nova] Acquired lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.787854] env[69994]: DEBUG nova.network.neutron [req-aa3d5d39-845a-428e-86a8-8a9c80f3f486 req-d7ae93db-7bbc-4a9b-b784-a3e0bbb2a2de service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Refreshing network info cache for port 7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.788844] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92eec87e-a635-4706-8bab-1192d5e731da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.799482] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f160880-1b21-4119-bc16-7607c960b159 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.834095] env[69994]: DEBUG nova.compute.manager [req-38afbf1f-a995-41e4-939b-a0f31b2540f1 req-4a315c7a-d246-495d-bcb9-fa53565f78e3 service nova] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Detach interface failed, port_id=a81f7c8f-8499-4f74-8860-fca65590ea7b, reason: Instance 298a4d59-733f-4cda-a9c2-80dc21be91ca could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 930.861181] env[69994]: DEBUG nova.scheduler.client.report [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 95 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 930.862024] env[69994]: DEBUG nova.compute.provider_tree [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 95 to 96 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 930.862024] env[69994]: DEBUG nova.compute.provider_tree [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 930.880967] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.967287] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "b4c6b628-426e-4efc-b8b6-0c2937ef6df3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.967373] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "b4c6b628-426e-4efc-b8b6-0c2937ef6df3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.967642] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock 
"b4c6b628-426e-4efc-b8b6-0c2937ef6df3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.968868] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "b4c6b628-426e-4efc-b8b6-0c2937ef6df3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.968868] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "b4c6b628-426e-4efc-b8b6-0c2937ef6df3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.971200] env[69994]: INFO nova.compute.manager [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Terminating instance [ 931.367948] env[69994]: DEBUG oslo_concurrency.lockutils [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.074s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.371117] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.173s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.372637] env[69994]: INFO nova.compute.claims [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 931.380830] env[69994]: DEBUG nova.compute.manager [req-830e56fa-83cf-4a04-858a-a6c5ac14734e req-aeb0f5cd-0da8-470a-9881-efbc771ab022 service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Received event network-changed-695f64c6-49fa-4348-bb2b-910d7a8546e6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 931.381017] env[69994]: DEBUG nova.compute.manager [req-830e56fa-83cf-4a04-858a-a6c5ac14734e req-aeb0f5cd-0da8-470a-9881-efbc771ab022 service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Refreshing instance network info cache due to event network-changed-695f64c6-49fa-4348-bb2b-910d7a8546e6. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 931.381246] env[69994]: DEBUG oslo_concurrency.lockutils [req-830e56fa-83cf-4a04-858a-a6c5ac14734e req-aeb0f5cd-0da8-470a-9881-efbc771ab022 service nova] Acquiring lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.381387] env[69994]: DEBUG oslo_concurrency.lockutils [req-830e56fa-83cf-4a04-858a-a6c5ac14734e req-aeb0f5cd-0da8-470a-9881-efbc771ab022 service nova] Acquired lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.381607] env[69994]: DEBUG nova.network.neutron [req-830e56fa-83cf-4a04-858a-a6c5ac14734e req-aeb0f5cd-0da8-470a-9881-efbc771ab022 service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Refreshing network info cache for port 695f64c6-49fa-4348-bb2b-910d7a8546e6 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 931.394997] env[69994]: INFO nova.scheduler.client.report [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Deleted allocations for instance 63d6a59a-d58c-4179-ad39-eb9863e6f84c [ 931.475972] env[69994]: DEBUG nova.compute.manager [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 931.476242] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 931.477129] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8694a753-f54c-4885-98d6-e7ec160f344d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.492754] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 931.492754] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1622d4ba-fbf4-42d9-92b6-53c61afda3fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.497575] env[69994]: DEBUG oslo_vmware.api [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 931.497575] env[69994]: value = "task-2925718" [ 931.497575] env[69994]: _type = "Task" [ 931.497575] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.510585] env[69994]: DEBUG oslo_vmware.api [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925718, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.590304] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "93087ec4-1d88-47cc-b1d2-0f1697556eae" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.590712] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "93087ec4-1d88-47cc-b1d2-0f1697556eae" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.591257] env[69994]: INFO nova.compute.manager [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Rebooting instance [ 931.631764] env[69994]: DEBUG nova.network.neutron [req-aa3d5d39-845a-428e-86a8-8a9c80f3f486 req-d7ae93db-7bbc-4a9b-b784-a3e0bbb2a2de service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Updated VIF entry in instance network info cache for port 7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.633134] env[69994]: DEBUG nova.network.neutron [req-aa3d5d39-845a-428e-86a8-8a9c80f3f486 req-d7ae93db-7bbc-4a9b-b784-a3e0bbb2a2de service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Updating instance_info_cache with network_info: [{"id": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "address": "fa:16:3e:02:1d:b7", "network": {"id": "fdb14e71-8fc5-4c42-b3c1-40ef6285d670", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1372511022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "522a6d0ba5a54a3f9e6cdb0cb1d18b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e211ec9-4d", "ovs_interfaceid": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.700106] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Acquiring lock "1e19dc4d-c3dd-41e7-819f-30d54cb1390e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.700460] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Lock "1e19dc4d-c3dd-41e7-819f-30d54cb1390e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.910108] env[69994]: DEBUG oslo_concurrency.lockutils [None req-283f8dbe-043d-4316-b988-47928f02bb8d tempest-MigrationsAdminTest-1712202806 tempest-MigrationsAdminTest-1712202806-project-member] Lock "63d6a59a-d58c-4179-ad39-eb9863e6f84c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.032s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.009834] env[69994]: DEBUG oslo_vmware.api [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925718, 'name': PowerOffVM_Task, 'duration_secs': 0.23927} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.009976] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 932.010080] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 932.010342] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8471368f-c474-41e4-924c-f6c14429299d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.040535] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "ee68a538-d803-4bd6-9117-b021b28da899" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.040673] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "ee68a538-d803-4bd6-9117-b021b28da899" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.083877] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 932.084267] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 932.084367] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleting the datastore file [datastore2] b4c6b628-426e-4efc-b8b6-0c2937ef6df3 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 932.084734] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a85f241-76b7-4adc-925a-77914398af55 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.091259] env[69994]: DEBUG oslo_vmware.api [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 932.091259] env[69994]: value = "task-2925720" [ 
932.091259] env[69994]: _type = "Task" [ 932.091259] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.102750] env[69994]: DEBUG oslo_vmware.api [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925720, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.123224] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.135472] env[69994]: DEBUG oslo_concurrency.lockutils [req-aa3d5d39-845a-428e-86a8-8a9c80f3f486 req-d7ae93db-7bbc-4a9b-b784-a3e0bbb2a2de service nova] Releasing lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.203849] env[69994]: DEBUG nova.compute.manager [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 932.313162] env[69994]: DEBUG nova.network.neutron [req-830e56fa-83cf-4a04-858a-a6c5ac14734e req-aeb0f5cd-0da8-470a-9881-efbc771ab022 service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Updated VIF entry in instance network info cache for port 695f64c6-49fa-4348-bb2b-910d7a8546e6. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 932.313162] env[69994]: DEBUG nova.network.neutron [req-830e56fa-83cf-4a04-858a-a6c5ac14734e req-aeb0f5cd-0da8-470a-9881-efbc771ab022 service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Updating instance_info_cache with network_info: [{"id": "695f64c6-49fa-4348-bb2b-910d7a8546e6", "address": "fa:16:3e:7d:c8:e8", "network": {"id": "e62ff410-9189-4ff0-98cd-b044bdf4b4b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-758708818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "891cfe67dd0044f3920402752215e361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap695f64c6-49", "ovs_interfaceid": "695f64c6-49fa-4348-bb2b-910d7a8546e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.548822] env[69994]: DEBUG nova.compute.manager [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 932.606724] env[69994]: DEBUG oslo_vmware.api [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925720, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.394877} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.606724] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 932.606724] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 932.606724] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 932.606724] env[69994]: INFO nova.compute.manager [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 932.607037] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 932.607270] env[69994]: DEBUG nova.compute.manager [-] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 932.607372] env[69994]: DEBUG nova.network.neutron [-] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 932.728105] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.730030] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124fe5fb-620e-4c67-8ae2-303930d679a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.737519] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b95f153-c9b7-48e7-a283-ce0412ca9b55 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.770360] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c3fc1f-ff40-49bd-80c0-b9bfe828b10c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.777998] env[69994]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5123b8dc-eec8-4b46-89a3-c69aa7c0ce2b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.791262] env[69994]: DEBUG nova.compute.provider_tree [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.816669] env[69994]: DEBUG oslo_concurrency.lockutils [req-830e56fa-83cf-4a04-858a-a6c5ac14734e req-aeb0f5cd-0da8-470a-9881-efbc771ab022 service nova] Releasing lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.817142] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquired lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.817582] env[69994]: DEBUG nova.network.neutron [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.073867] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.145841] env[69994]: DEBUG nova.compute.manager [req-284fe289-d50f-4022-8160-ea75aa5c8de2 req-7bdb0da7-b791-4577-88c6-62c8e7ffe576 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Received event network-changed-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 933.146061] env[69994]: DEBUG nova.compute.manager [req-284fe289-d50f-4022-8160-ea75aa5c8de2 req-7bdb0da7-b791-4577-88c6-62c8e7ffe576 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Refreshing instance network info cache due to event network-changed-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 933.146756] env[69994]: DEBUG oslo_concurrency.lockutils [req-284fe289-d50f-4022-8160-ea75aa5c8de2 req-7bdb0da7-b791-4577-88c6-62c8e7ffe576 service nova] Acquiring lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.146960] env[69994]: DEBUG oslo_concurrency.lockutils [req-284fe289-d50f-4022-8160-ea75aa5c8de2 req-7bdb0da7-b791-4577-88c6-62c8e7ffe576 service nova] Acquired lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.147116] env[69994]: DEBUG nova.network.neutron [req-284fe289-d50f-4022-8160-ea75aa5c8de2 req-7bdb0da7-b791-4577-88c6-62c8e7ffe576 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Refreshing network info cache for port 7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 933.294842] env[69994]: DEBUG nova.scheduler.client.report [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 933.500023] env[69994]: DEBUG nova.network.neutron [-] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.675634] env[69994]: DEBUG nova.compute.manager [req-906ac399-8e1a-41ec-85e2-ee493bc0e8b6 req-d5b4a979-3e94-4a69-b9f1-900bb60bd6df service nova] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Received event network-vif-deleted-76010ade-afe2-44ed-bf2f-ed07bdaac451 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 933.675871] env[69994]: DEBUG nova.compute.manager [req-906ac399-8e1a-41ec-85e2-ee493bc0e8b6 req-d5b4a979-3e94-4a69-b9f1-900bb60bd6df service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Received event network-changed-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 933.676891] env[69994]: DEBUG nova.compute.manager [req-906ac399-8e1a-41ec-85e2-ee493bc0e8b6 req-d5b4a979-3e94-4a69-b9f1-900bb60bd6df service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Refreshing instance network info cache due to event network-changed-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 933.677183] env[69994]: DEBUG oslo_concurrency.lockutils [req-906ac399-8e1a-41ec-85e2-ee493bc0e8b6 req-d5b4a979-3e94-4a69-b9f1-900bb60bd6df service nova] Acquiring lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.751641] env[69994]: DEBUG nova.network.neutron [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Updating instance_info_cache with network_info: [{"id": "695f64c6-49fa-4348-bb2b-910d7a8546e6", "address": "fa:16:3e:7d:c8:e8", "network": {"id": "e62ff410-9189-4ff0-98cd-b044bdf4b4b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-758708818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "891cfe67dd0044f3920402752215e361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap695f64c6-49", "ovs_interfaceid": "695f64c6-49fa-4348-bb2b-910d7a8546e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.803748] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.804266] env[69994]: DEBUG nova.compute.manager [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 933.807434] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.581s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.807682] env[69994]: DEBUG nova.objects.instance [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lazy-loading 'resources' on Instance uuid d28a6129-1bfe-40da-bc91-c68cf874aa36 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 933.879923] env[69994]: DEBUG nova.network.neutron [req-284fe289-d50f-4022-8160-ea75aa5c8de2 req-7bdb0da7-b791-4577-88c6-62c8e7ffe576 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Updated VIF entry in instance network info cache for port 7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 933.880335] env[69994]: DEBUG nova.network.neutron [req-284fe289-d50f-4022-8160-ea75aa5c8de2 req-7bdb0da7-b791-4577-88c6-62c8e7ffe576 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Updating instance_info_cache with network_info: [{"id": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "address": "fa:16:3e:02:1d:b7", "network": {"id": "fdb14e71-8fc5-4c42-b3c1-40ef6285d670", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1372511022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "522a6d0ba5a54a3f9e6cdb0cb1d18b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e211ec9-4d", "ovs_interfaceid": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.003733] env[69994]: INFO nova.compute.manager [-] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Took 1.40 seconds to deallocate network for instance. 
[ 934.254758] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Releasing lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.312336] env[69994]: DEBUG nova.compute.utils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 934.314677] env[69994]: DEBUG nova.compute.manager [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 934.315112] env[69994]: DEBUG nova.network.neutron [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 934.370242] env[69994]: DEBUG nova.policy [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d6a1603506e4d48a9d2f8bf61475821', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f602778aac0d41c49e73c2450f31d711', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 934.385016] env[69994]: DEBUG oslo_concurrency.lockutils [req-284fe289-d50f-4022-8160-ea75aa5c8de2 req-7bdb0da7-b791-4577-88c6-62c8e7ffe576 service nova] Releasing lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.385016] env[69994]: DEBUG oslo_concurrency.lockutils [req-906ac399-8e1a-41ec-85e2-ee493bc0e8b6 req-d5b4a979-3e94-4a69-b9f1-900bb60bd6df service nova] Acquired lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.385016] env[69994]: DEBUG nova.network.neutron [req-906ac399-8e1a-41ec-85e2-ee493bc0e8b6 req-d5b4a979-3e94-4a69-b9f1-900bb60bd6df service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Refreshing network info cache for port 7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 934.511278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.752388] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe78b0d-5877-427d-ae89-c01ebc3f9776 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.761611] env[69994]: DEBUG nova.compute.manager [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 934.762633] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa1e167-5c8c-4dbc-9767-558e33707573 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.766381] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9230ce65-ff93-4b2c-adab-56ea94f4a6ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.805198] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1ba2c6-ee97-48eb-86af-ecd702b3f885 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.812872] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5179c318-d3c7-44ce-9e80-39933537120b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.825721] env[69994]: DEBUG nova.compute.manager [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 934.835583] env[69994]: DEBUG nova.compute.provider_tree [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.971515] env[69994]: DEBUG nova.network.neutron [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Successfully created port: 2bda4ed5-72d2-44ec-bf8e-43efc8fab0db {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 935.180441] env[69994]: DEBUG nova.compute.manager [req-398b8dd4-f895-4974-bf84-d967be7bd0b8 req-b9e4d184-89b0-4101-81a1-cd08e3b66d4d service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Received event network-changed-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 935.180558] env[69994]: DEBUG nova.compute.manager [req-398b8dd4-f895-4974-bf84-d967be7bd0b8 req-b9e4d184-89b0-4101-81a1-cd08e3b66d4d service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Refreshing instance network info cache due to event network-changed-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 935.180688] env[69994]: DEBUG oslo_concurrency.lockutils [req-398b8dd4-f895-4974-bf84-d967be7bd0b8 req-b9e4d184-89b0-4101-81a1-cd08e3b66d4d service nova] Acquiring lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.316108] env[69994]: DEBUG nova.network.neutron [req-906ac399-8e1a-41ec-85e2-ee493bc0e8b6 req-d5b4a979-3e94-4a69-b9f1-900bb60bd6df service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Updated VIF entry in instance network info cache for port 7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 935.316491] env[69994]: DEBUG nova.network.neutron [req-906ac399-8e1a-41ec-85e2-ee493bc0e8b6 req-d5b4a979-3e94-4a69-b9f1-900bb60bd6df service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Updating instance_info_cache with network_info: [{"id": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "address": "fa:16:3e:02:1d:b7", "network": {"id": "fdb14e71-8fc5-4c42-b3c1-40ef6285d670", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1372511022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "522a6d0ba5a54a3f9e6cdb0cb1d18b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e211ec9-4d", "ovs_interfaceid": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.343222] env[69994]: DEBUG nova.scheduler.client.report [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 935.813022] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fba66c-bdfc-4448-a0b3-93d797daf4e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.822327] env[69994]: DEBUG oslo_concurrency.lockutils [req-906ac399-8e1a-41ec-85e2-ee493bc0e8b6 req-d5b4a979-3e94-4a69-b9f1-900bb60bd6df service nova] Releasing lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.822728] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Doing hard reboot of VM {{(pid=69994) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 935.822986] env[69994]: DEBUG oslo_concurrency.lockutils [req-398b8dd4-f895-4974-bf84-d967be7bd0b8 req-b9e4d184-89b0-4101-81a1-cd08e3b66d4d service nova] Acquired lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.824492] env[69994]: DEBUG nova.network.neutron [req-398b8dd4-f895-4974-bf84-d967be7bd0b8 req-b9e4d184-89b0-4101-81a1-cd08e3b66d4d service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Refreshing network info cache for port 7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 935.824492] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-e951de21-b684-4376-8b2d-fca14e3da0ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.831641] env[69994]: DEBUG oslo_vmware.api [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 935.831641] env[69994]: value = "task-2925721" [ 935.831641] env[69994]: _type = "Task" [ 935.831641] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.842465] env[69994]: DEBUG oslo_vmware.api [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925721, 'name': ResetVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.849099] env[69994]: DEBUG nova.compute.manager [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 935.852066] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.044s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.853959] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.258s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.855249] env[69994]: INFO nova.compute.claims [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 935.881030] env[69994]: INFO nova.scheduler.client.report [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted allocations for instance d28a6129-1bfe-40da-bc91-c68cf874aa36 [ 935.885803] env[69994]: DEBUG nova.virt.hardware [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 935.885803] env[69994]: DEBUG nova.virt.hardware [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 935.885803] env[69994]: DEBUG nova.virt.hardware [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 935.885803] env[69994]: DEBUG nova.virt.hardware [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 935.885803] env[69994]: DEBUG nova.virt.hardware [None 
req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 935.885803] env[69994]: DEBUG nova.virt.hardware [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 935.885803] env[69994]: DEBUG nova.virt.hardware [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 935.885803] env[69994]: DEBUG nova.virt.hardware [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 935.885803] env[69994]: DEBUG nova.virt.hardware [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 935.885803] env[69994]: DEBUG nova.virt.hardware [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 935.885803] env[69994]: DEBUG nova.virt.hardware [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 935.886336] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a60397-1bf0-4df1-979e-7fd63fa886ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.897026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ce41f4-1159-4bd2-aa61-b159b14ca4a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.211656] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquiring lock "43119e21-5226-482c-b640-33e73051a563" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.211656] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 
tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Lock "43119e21-5226-482c-b640-33e73051a563" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.211656] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquiring lock "43119e21-5226-482c-b640-33e73051a563-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.211656] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Lock "43119e21-5226-482c-b640-33e73051a563-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.211656] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Lock "43119e21-5226-482c-b640-33e73051a563-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.213114] env[69994]: INFO nova.compute.manager [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Terminating instance [ 936.342959] env[69994]: DEBUG oslo_vmware.api [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925721, 'name': ResetVM_Task, 'duration_secs': 0.092041} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.343249] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Did hard reboot of VM {{(pid=69994) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 936.343445] env[69994]: DEBUG nova.compute.manager [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 936.344486] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a519da4b-a531-4fdd-83f5-5c2aa58c9263 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.397060] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c02522f4-189d-4ea2-8c87-fe8c00c96fde tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "d28a6129-1bfe-40da-bc91-c68cf874aa36" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.948s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.717109] env[69994]: DEBUG nova.compute.manager [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 936.717382] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 936.718429] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa1de95-ccd2-411a-afbe-53ecc488e3ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.726987] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.727290] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30b11778-5eac-42f2-b750-5cfa87d12ed7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.732687] env[69994]: DEBUG oslo_vmware.api [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 936.732687] env[69994]: value = "task-2925722" [ 936.732687] env[69994]: _type = "Task" [ 936.732687] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.744342] env[69994]: DEBUG oslo_vmware.api [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925722, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.765154] env[69994]: DEBUG nova.network.neutron [req-398b8dd4-f895-4974-bf84-d967be7bd0b8 req-b9e4d184-89b0-4101-81a1-cd08e3b66d4d service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Updated VIF entry in instance network info cache for port 7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 936.767143] env[69994]: DEBUG nova.network.neutron [req-398b8dd4-f895-4974-bf84-d967be7bd0b8 req-b9e4d184-89b0-4101-81a1-cd08e3b66d4d service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Updating instance_info_cache with network_info: [{"id": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "address": "fa:16:3e:02:1d:b7", "network": {"id": "fdb14e71-8fc5-4c42-b3c1-40ef6285d670", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1372511022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "522a6d0ba5a54a3f9e6cdb0cb1d18b6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e211ec9-4d", "ovs_interfaceid": "7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.860062] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc1409f5-701b-4dd3-8329-19e18ae9c958 tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "93087ec4-1d88-47cc-b1d2-0f1697556eae" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.269s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.901616] env[69994]: DEBUG nova.compute.manager [req-48957e5b-4cc5-48c8-ac4c-75cdccce711d req-9d1406a5-952f-488f-8996-8d00e95b03c2 service nova] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Received event network-vif-plugged-2bda4ed5-72d2-44ec-bf8e-43efc8fab0db {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 936.901616] env[69994]: DEBUG oslo_concurrency.lockutils [req-48957e5b-4cc5-48c8-ac4c-75cdccce711d req-9d1406a5-952f-488f-8996-8d00e95b03c2 service nova] Acquiring lock "83cef95b-99a5-4e6e-8258-79b380b595b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.901616] env[69994]: DEBUG oslo_concurrency.lockutils [req-48957e5b-4cc5-48c8-ac4c-75cdccce711d req-9d1406a5-952f-488f-8996-8d00e95b03c2 service nova] Lock "83cef95b-99a5-4e6e-8258-79b380b595b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.901616] env[69994]: DEBUG oslo_concurrency.lockutils [req-48957e5b-4cc5-48c8-ac4c-75cdccce711d req-9d1406a5-952f-488f-8996-8d00e95b03c2 service nova] Lock "83cef95b-99a5-4e6e-8258-79b380b595b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.901616] env[69994]: DEBUG nova.compute.manager [req-48957e5b-4cc5-48c8-ac4c-75cdccce711d req-9d1406a5-952f-488f-8996-8d00e95b03c2 service nova] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] No waiting events found dispatching network-vif-plugged-2bda4ed5-72d2-44ec-bf8e-43efc8fab0db {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 936.901616] env[69994]: WARNING nova.compute.manager [req-48957e5b-4cc5-48c8-ac4c-75cdccce711d req-9d1406a5-952f-488f-8996-8d00e95b03c2 service nova] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Received unexpected event network-vif-plugged-2bda4ed5-72d2-44ec-bf8e-43efc8fab0db for instance with vm_state building and task_state spawning. [ 937.157199] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e4dffb-9b0b-45f2-95b2-c0682c06c3a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.165574] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdefa5bd-365b-46c2-a9ff-d04f8f098556 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.203787] env[69994]: DEBUG nova.network.neutron [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Successfully updated port: 2bda4ed5-72d2-44ec-bf8e-43efc8fab0db {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 937.207851] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff17c4c-9e5f-411a-834e-4e1b1ddf7945 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.217533] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8e2a9f-428b-47cf-92d4-2e76393bc839 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.232523] env[69994]: DEBUG nova.compute.provider_tree [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.249031] env[69994]: DEBUG oslo_vmware.api [None 
req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925722, 'name': PowerOffVM_Task, 'duration_secs': 0.255941} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.249031] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.249031] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.249031] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e51bb2e-0089-42f8-99ce-71bd30e6427b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.273175] env[69994]: DEBUG oslo_concurrency.lockutils [req-398b8dd4-f895-4974-bf84-d967be7bd0b8 req-b9e4d184-89b0-4101-81a1-cd08e3b66d4d service nova] Releasing lock "refresh_cache-43119e21-5226-482c-b640-33e73051a563" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.713399] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "refresh_cache-83cef95b-99a5-4e6e-8258-79b380b595b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.713769] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "refresh_cache-83cef95b-99a5-4e6e-8258-79b380b595b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.713944] env[69994]: DEBUG nova.network.neutron [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 937.738778] env[69994]: DEBUG nova.scheduler.client.report [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 937.972930] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "f2ae08e9-fbf3-49ab-8290-75f8a53d6030" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.973322] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f2ae08e9-fbf3-49ab-8290-75f8a53d6030" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.093768] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 938.094087] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 938.094321] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Deleting the datastore file [datastore1] 43119e21-5226-482c-b640-33e73051a563 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 938.094953] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-781de4e7-beba-40b8-b692-2bb2e0313ef2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.104304] env[69994]: DEBUG oslo_vmware.api [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for the task: (returnval){ [ 938.104304] env[69994]: value = "task-2925724" [ 938.104304] env[69994]: _type = "Task" [ 938.104304] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.119235] env[69994]: DEBUG oslo_vmware.api [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925724, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.249023] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.249835] env[69994]: DEBUG nova.compute.manager [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 938.254541] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.978s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.255023] env[69994]: DEBUG nova.objects.instance [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lazy-loading 'resources' on Instance uuid 298a4d59-733f-4cda-a9c2-80dc21be91ca {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.316347] env[69994]: DEBUG nova.network.neutron [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.476652] env[69994]: DEBUG nova.compute.manager [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 938.575217] env[69994]: DEBUG nova.network.neutron [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Updating instance_info_cache with network_info: [{"id": "2bda4ed5-72d2-44ec-bf8e-43efc8fab0db", "address": "fa:16:3e:b7:dd:ef", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bda4ed5-72", "ovs_interfaceid": "2bda4ed5-72d2-44ec-bf8e-43efc8fab0db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.618023] env[69994]: DEBUG oslo_vmware.api [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Task: {'id': task-2925724, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.259002} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.618314] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 938.618571] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 938.618759] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 938.618971] env[69994]: INFO nova.compute.manager [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] [instance: 43119e21-5226-482c-b640-33e73051a563] Took 1.90 seconds to destroy the instance on the hypervisor. 
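The destroy sequence above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) is paced by the oslo_vmware wait_for_task/_poll_task entries: the API layer keeps re-reading the task state at a fixed interval until the task reports success or error. The following is a minimal sketch of that poll-until-done pattern using oslo.service's FixedIntervalLoopingCall, the looping-call machinery the log references; fetch_task_state is a hypothetical stub standing in for the vSphere task-info lookup, not oslo.vmware's actual internals.

    # Minimal sketch of the poll-until-done pattern behind wait_for_task.
    # Assumption: fetch_task_state(task_ref) is a hypothetical helper standing
    # in for the vSphere PropertyCollector read of the task's `info` property.
    from oslo_service import loopingcall


    def fetch_task_state(task_ref):
        # Hypothetical stub: a real implementation would read Task.info via
        # the vSphere API; here the task "finishes" immediately.
        return {'state': 'success', 'progress': 100, 'result': None}


    def wait_for_vsphere_task(task_ref, poll_interval=0.5):
        """Poll a vSphere task reference until it succeeds or errors."""

        def _poll():
            info = fetch_task_state(task_ref)
            if info['state'] == 'success':
                # Raising LoopingCallDone stops the loop; its value becomes
                # the return value of .wait() below.
                raise loopingcall.LoopingCallDone(info['result'])
            if info['state'] == 'error':
                raise RuntimeError('task %s failed' % task_ref)
            # Task still queued/running: report progress and poll again.
            print('task %s progress is %s%%' % (task_ref, info.get('progress', 0)))

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=poll_interval).wait()


    if __name__ == '__main__':
        wait_for_vsphere_task('task-2925724')

Raising LoopingCallDone is what ends the loop; anything else keeps the poller running at the chosen interval, which is why long-running tasks show repeated "progress is N%" lines before the final "completed successfully" entry.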
[ 938.619274] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 938.619508] env[69994]: DEBUG nova.compute.manager [-] [instance: 43119e21-5226-482c-b640-33e73051a563] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 938.619625] env[69994]: DEBUG nova.network.neutron [-] [instance: 43119e21-5226-482c-b640-33e73051a563] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 938.758400] env[69994]: DEBUG nova.compute.utils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 938.761300] env[69994]: DEBUG nova.compute.manager [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Not allocating networking since 'none' was specified. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 938.997380] env[69994]: DEBUG nova.compute.manager [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Received event network-changed-2bda4ed5-72d2-44ec-bf8e-43efc8fab0db {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 938.997380] env[69994]: DEBUG nova.compute.manager [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Refreshing instance network info cache due to event network-changed-2bda4ed5-72d2-44ec-bf8e-43efc8fab0db. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 938.997380] env[69994]: DEBUG oslo_concurrency.lockutils [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] Acquiring lock "refresh_cache-83cef95b-99a5-4e6e-8258-79b380b595b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.004559] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.079836] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "refresh_cache-83cef95b-99a5-4e6e-8258-79b380b595b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.080245] env[69994]: DEBUG nova.compute.manager [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Instance network_info: |[{"id": "2bda4ed5-72d2-44ec-bf8e-43efc8fab0db", "address": "fa:16:3e:b7:dd:ef", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bda4ed5-72", "ovs_interfaceid": "2bda4ed5-72d2-44ec-bf8e-43efc8fab0db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 939.080589] env[69994]: DEBUG oslo_concurrency.lockutils [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] Acquired lock "refresh_cache-83cef95b-99a5-4e6e-8258-79b380b595b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.080819] env[69994]: DEBUG nova.network.neutron [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Refreshing network info cache for port 2bda4ed5-72d2-44ec-bf8e-43efc8fab0db {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 939.082169] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c 
tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:dd:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2bda4ed5-72d2-44ec-bf8e-43efc8fab0db', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 939.090496] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Creating folder: Project (f602778aac0d41c49e73c2450f31d711). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 939.098027] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b07e9ccc-f020-4a87-98e7-4cacdd97b226 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.110540] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Created folder: Project (f602778aac0d41c49e73c2450f31d711) in parent group-v587342. [ 939.110582] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Creating folder: Instances. Parent ref: group-v587527. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 939.110827] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e13a24e-622d-4f8c-820a-9ed718e9356f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.127787] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Created folder: Instances in parent group-v587527. [ 939.127787] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 939.127787] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 939.127787] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b363af3-dbb2-4ed3-a729-2751b4078b0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.149698] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 939.149698] env[69994]: value = "task-2925727" [ 939.149698] env[69994]: _type = "Task" [ 939.149698] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.163603] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925727, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.169997] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3d3fa0-98bd-4dde-816b-0d6235710285 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.180202] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5ac547-5e9d-485c-8e0f-d8ae778313bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.218839] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf8b5ad-6056-4af3-a3ee-5806cf8485fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.227640] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebb2551-b59b-4c5f-a87c-b25b6996420a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.242156] env[69994]: DEBUG nova.compute.provider_tree [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.264256] env[69994]: DEBUG nova.compute.manager [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 939.367258] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "93087ec4-1d88-47cc-b1d2-0f1697556eae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.367439] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "93087ec4-1d88-47cc-b1d2-0f1697556eae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.367692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "93087ec4-1d88-47cc-b1d2-0f1697556eae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.368135] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "93087ec4-1d88-47cc-b1d2-0f1697556eae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.368135] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "93087ec4-1d88-47cc-b1d2-0f1697556eae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.373594] env[69994]: INFO nova.compute.manager [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Terminating instance [ 939.426155] env[69994]: DEBUG nova.network.neutron [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Updated VIF entry in instance network info cache for port 2bda4ed5-72d2-44ec-bf8e-43efc8fab0db. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 939.426853] env[69994]: DEBUG nova.network.neutron [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Updating instance_info_cache with network_info: [{"id": "2bda4ed5-72d2-44ec-bf8e-43efc8fab0db", "address": "fa:16:3e:b7:dd:ef", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bda4ed5-72", "ovs_interfaceid": "2bda4ed5-72d2-44ec-bf8e-43efc8fab0db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.443545] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "e03bc64f-70e9-4097-a1e1-ebf8f86508ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.444265] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "e03bc64f-70e9-4097-a1e1-ebf8f86508ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.500508] env[69994]: DEBUG nova.network.neutron [-] [instance: 43119e21-5226-482c-b640-33e73051a563] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.661473] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925727, 'name': CreateVM_Task, 'duration_secs': 0.466539} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.661669] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 939.662762] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.662939] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.663307] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 939.663553] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46b4ba4e-ca74-4184-b4d1-0c125231e08e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.669509] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 939.669509] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522f0242-5b40-042d-db3e-e9d0bd5dec73" [ 939.669509] env[69994]: _type = "Task" [ 939.669509] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.682411] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522f0242-5b40-042d-db3e-e9d0bd5dec73, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.747846] env[69994]: DEBUG nova.scheduler.client.report [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 939.878923] env[69994]: DEBUG nova.compute.manager [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 939.879192] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 939.880139] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f38e66-a7e2-4695-b970-64955ff2213f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.888104] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.888738] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c38af0c6-42fa-491b-b37c-c032f526713a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.895403] env[69994]: DEBUG oslo_vmware.api [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 939.895403] env[69994]: value = "task-2925728" [ 939.895403] env[69994]: _type = "Task" [ 939.895403] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.903410] env[69994]: DEBUG oslo_vmware.api [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925728, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.931444] env[69994]: DEBUG oslo_concurrency.lockutils [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] Releasing lock "refresh_cache-83cef95b-99a5-4e6e-8258-79b380b595b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.931632] env[69994]: DEBUG nova.compute.manager [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Received event network-changed-695f64c6-49fa-4348-bb2b-910d7a8546e6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 939.931826] env[69994]: DEBUG nova.compute.manager [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Refreshing instance network info cache due to event network-changed-695f64c6-49fa-4348-bb2b-910d7a8546e6. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 939.932076] env[69994]: DEBUG oslo_concurrency.lockutils [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] Acquiring lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.932230] env[69994]: DEBUG oslo_concurrency.lockutils [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] Acquired lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.932420] env[69994]: DEBUG nova.network.neutron [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Refreshing network info cache for port 695f64c6-49fa-4348-bb2b-910d7a8546e6 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 939.946171] env[69994]: DEBUG nova.compute.manager [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 940.007325] env[69994]: INFO nova.compute.manager [-] [instance: 43119e21-5226-482c-b640-33e73051a563] Took 1.39 seconds to deallocate network for instance. [ 940.178870] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522f0242-5b40-042d-db3e-e9d0bd5dec73, 'name': SearchDatastore_Task, 'duration_secs': 0.010211} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.179200] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.179480] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 940.179719] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.179865] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.180062] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 940.180340] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89deb895-cb25-4796-a449-969fa80a1519 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.190168] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 940.190345] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 940.191149] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1785926d-0995-4e48-b2ee-4d67ce37a0dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.196751] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 940.196751] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c9e9f5-41cf-714e-3266-f2d2848b2400" [ 940.196751] env[69994]: _type = "Task" [ 940.196751] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.206312] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c9e9f5-41cf-714e-3266-f2d2848b2400, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.253608] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.999s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.256304] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.375s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.258034] env[69994]: INFO nova.compute.claims [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 940.273148] env[69994]: INFO nova.scheduler.client.report [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Deleted allocations for instance 298a4d59-733f-4cda-a9c2-80dc21be91ca [ 940.274816] env[69994]: DEBUG nova.compute.manager [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 940.299093] env[69994]: DEBUG nova.virt.hardware [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 940.300028] env[69994]: DEBUG nova.virt.hardware [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 940.300028] env[69994]: DEBUG nova.virt.hardware [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 940.300028] env[69994]: DEBUG nova.virt.hardware [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 940.300028] env[69994]: DEBUG nova.virt.hardware [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 940.300028] env[69994]: DEBUG nova.virt.hardware [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 940.300421] env[69994]: DEBUG nova.virt.hardware [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 940.300421] env[69994]: DEBUG nova.virt.hardware [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 940.300798] env[69994]: DEBUG nova.virt.hardware [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 
tempest-ServerShowV257Test-772691005-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 940.301134] env[69994]: DEBUG nova.virt.hardware [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 940.301211] env[69994]: DEBUG nova.virt.hardware [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 940.302100] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01364e49-8324-45ee-9f87-c96d542ff857 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.310960] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed65182-f03c-45cd-82f5-ffb9d2c4a36c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.325160] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 940.330734] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Creating folder: Project (e1b56106c650411a9051091bf6a366a4). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 940.331540] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49b22dbd-fb0a-49fc-a5e0-008740d7d482 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.343020] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Created folder: Project (e1b56106c650411a9051091bf6a366a4) in parent group-v587342. [ 940.343020] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Creating folder: Instances. Parent ref: group-v587530. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 940.343020] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d55536ab-a03e-4e6c-a453-7667f9626e50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.351770] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Created folder: Instances in parent group-v587530. 
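The repeated Lock "compute_resources" acquired/released messages in this stretch (ResourceTracker.instance_claim and update_usage, with waited/held durations) are produced by oslo.concurrency's lockutils wrapper, which serializes resource-tracker updates on the host and records how long each caller waited for and then held the lock. Below is a minimal sketch of that usage, assuming a simplified stand-in ResourceTracker rather than Nova's real class.

    # Minimal sketch of the lockutils pattern behind the "compute_resources"
    # lock messages. The ResourceTracker here is a simplified stand-in; only
    # the lockutils usage mirrors what the log records.
    from oslo_concurrency import lockutils


    class ResourceTracker(object):
        def __init__(self):
            self.claims = {}

        @lockutils.synchronized('compute_resources')
        def instance_claim(self, instance_uuid, vcpus, memory_mb):
            # Runs with the per-process "compute_resources" lock held, so
            # concurrent claims/updates are serialized; callers that queue up
            # behind a slow claim are the "waited N s" entries in the log.
            self.claims[instance_uuid] = {'vcpus': vcpus, 'memory_mb': memory_mb}

        @lockutils.synchronized('compute_resources')
        def update_usage(self, instance_uuid):
            self.claims.pop(instance_uuid, None)


    if __name__ == '__main__':
        rt = ResourceTracker()
        rt.instance_claim('15595947-b944-4c82-90ae-883ed951c909',
                          vcpus=1, memory_mb=192)
        rt.update_usage('15595947-b944-4c82-90ae-883ed951c909')

With DEBUG logging enabled for oslo_concurrency, each call through the decorator emits the same "acquired by ... waited" and "released ... held" lines seen throughout this log.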
[ 940.352241] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 940.352574] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15595947-b944-4c82-90ae-883ed951c909] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 940.354819] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-288ec18e-ad09-48ea-b685-a8a5843a5ee7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.369371] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 940.369371] env[69994]: value = "task-2925731" [ 940.369371] env[69994]: _type = "Task" [ 940.369371] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.381687] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925731, 'name': CreateVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.405016] env[69994]: DEBUG oslo_vmware.api [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925728, 'name': PowerOffVM_Task, 'duration_secs': 0.225285} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.405016] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 940.405016] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 940.407269] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c78418d-93cc-4d7a-ae51-551edb9c5300 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.466890] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.475872] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Unregistered the VM {{(pid=69994) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 940.476128] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 940.477078] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Deleting the datastore file [datastore2] 93087ec4-1d88-47cc-b1d2-0f1697556eae {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 940.477377] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dab5b0c2-99a9-441c-96e6-5094256b8cf9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.484524] env[69994]: DEBUG oslo_vmware.api [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 940.484524] env[69994]: value = "task-2925733" [ 940.484524] env[69994]: _type = "Task" [ 940.484524] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.493728] env[69994]: DEBUG oslo_vmware.api [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925733, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.523045] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.655444] env[69994]: DEBUG nova.network.neutron [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Updated VIF entry in instance network info cache for port 695f64c6-49fa-4348-bb2b-910d7a8546e6. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 940.655941] env[69994]: DEBUG nova.network.neutron [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Updating instance_info_cache with network_info: [{"id": "695f64c6-49fa-4348-bb2b-910d7a8546e6", "address": "fa:16:3e:7d:c8:e8", "network": {"id": "e62ff410-9189-4ff0-98cd-b044bdf4b4b8", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-758708818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "891cfe67dd0044f3920402752215e361", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap695f64c6-49", "ovs_interfaceid": "695f64c6-49fa-4348-bb2b-910d7a8546e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.707256] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c9e9f5-41cf-714e-3266-f2d2848b2400, 'name': SearchDatastore_Task, 'duration_secs': 0.010506} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.708199] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2777ff2a-7c26-4f5f-a90d-88225a3176ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.713578] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 940.713578] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dc0452-3225-6f14-ea20-e1267051f660" [ 940.713578] env[69994]: _type = "Task" [ 940.713578] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.722212] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dc0452-3225-6f14-ea20-e1267051f660, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.778900] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquiring lock "e1c00159-d198-4858-b5a3-aa05152b1fda" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.780995] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Lock "e1c00159-d198-4858-b5a3-aa05152b1fda" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.789898] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19982d4d-7830-4335-bc60-13240314fcba tempest-ServersAdminTestJSON-854095043 tempest-ServersAdminTestJSON-854095043-project-member] Lock "298a4d59-733f-4cda-a9c2-80dc21be91ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 14.255s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.880020] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925731, 'name': CreateVM_Task, 'duration_secs': 0.269958} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.881706] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15595947-b944-4c82-90ae-883ed951c909] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 940.882165] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.882329] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.882750] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 940.882891] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65492d32-104e-4fbb-a839-1f9cedc4a24f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.888035] env[69994]: DEBUG oslo_vmware.api [None 
req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 940.888035] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5204f4f7-dcac-60f3-c462-ecbab4c8b84f" [ 940.888035] env[69994]: _type = "Task" [ 940.888035] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.895785] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5204f4f7-dcac-60f3-c462-ecbab4c8b84f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.997025] env[69994]: DEBUG oslo_vmware.api [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925733, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148023} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.997025] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 940.997025] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 940.997025] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 940.997025] env[69994]: INFO nova.compute.manager [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Took 1.12 seconds to destroy the instance on the hypervisor. [ 940.997025] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 940.997025] env[69994]: DEBUG nova.compute.manager [-] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 940.997025] env[69994]: DEBUG nova.network.neutron [-] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 941.024729] env[69994]: DEBUG nova.compute.manager [req-5ea03a40-0d87-4726-bed1-90f90d2d541f req-83aeba86-5cfb-4953-a3df-2eea74d98226 service nova] [instance: 43119e21-5226-482c-b640-33e73051a563] Received event network-vif-deleted-7e211ec9-4ddf-4aa6-bcd7-59acb20fb7ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 941.160795] env[69994]: DEBUG oslo_concurrency.lockutils [req-32dbc5f8-2d6a-4b2f-abb1-99923a541666 req-52c9040a-924e-48a5-a68a-2d0573990d35 service nova] Releasing lock "refresh_cache-93087ec4-1d88-47cc-b1d2-0f1697556eae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.227089] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dc0452-3225-6f14-ea20-e1267051f660, 'name': SearchDatastore_Task, 'duration_secs': 0.011037} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.227371] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.228342] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 83cef95b-99a5-4e6e-8258-79b380b595b3/83cef95b-99a5-4e6e-8258-79b380b595b3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 941.228342] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad6279de-2929-4c88-920c-8daa00ef18a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.236329] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 941.236329] env[69994]: value = "task-2925734" [ 941.236329] env[69994]: _type = "Task" [ 941.236329] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.253123] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925734, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.282486] env[69994]: DEBUG nova.compute.manager [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 941.399838] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5204f4f7-dcac-60f3-c462-ecbab4c8b84f, 'name': SearchDatastore_Task, 'duration_secs': 0.011927} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.400245] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.400472] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 941.400717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.404043] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 941.404043] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 941.404043] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53fbd968-c3b7-4ae0-98b2-d2361770373d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 941.411411] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 941.411411] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 941.411838] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dec3979a-4d04-48a5-b7fc-26765f5fe147 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.419037] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 941.419037] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bbe864-103e-b18b-b6df-bc9561225931" [ 941.419037] env[69994]: _type = "Task" [ 941.419037] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.432430] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bbe864-103e-b18b-b6df-bc9561225931, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.612578] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18c2830-0a4e-4a2f-9d58-16031c514963 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.624937] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153509c4-edea-4717-87ec-01ade00e1d94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.664728] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fec2c3-2670-4659-a41f-e2845c77a377 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.674387] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e5ea24-2e66-4fc6-a368-2fb3f5568b43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.693637] env[69994]: DEBUG nova.compute.provider_tree [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.749889] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925734, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476583} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.750183] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 83cef95b-99a5-4e6e-8258-79b380b595b3/83cef95b-99a5-4e6e-8258-79b380b595b3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 941.750431] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 941.750923] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9dae30dc-ebe2-482f-8260-179a6d54847f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.760925] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 941.760925] env[69994]: value = "task-2925735" [ 941.760925] env[69994]: _type = "Task" [ 941.760925] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.769348] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925735, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.778834] env[69994]: DEBUG nova.network.neutron [-] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.806325] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.937410] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bbe864-103e-b18b-b6df-bc9561225931, 'name': SearchDatastore_Task, 'duration_secs': 0.015458} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.938528] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f290536-dc97-4866-a2cc-71bac6bc8ba4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.944992] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 941.944992] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52892ee6-22a9-ba0d-fc11-43e4dc75c60c" [ 941.944992] env[69994]: _type = "Task" [ 941.944992] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.953226] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52892ee6-22a9-ba0d-fc11-43e4dc75c60c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.200018] env[69994]: DEBUG nova.scheduler.client.report [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 942.274605] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925735, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090827} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.274605] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 942.274605] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed46400-8425-476e-81be-88b0d54665d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.293698] env[69994]: INFO nova.compute.manager [-] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Took 1.30 seconds to deallocate network for instance. [ 942.305099] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 83cef95b-99a5-4e6e-8258-79b380b595b3/83cef95b-99a5-4e6e-8258-79b380b595b3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 942.307195] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d5a9e38-8278-4337-b02c-20503277bfb7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.334623] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 942.334623] env[69994]: value = "task-2925736" [ 942.334623] env[69994]: _type = "Task" [ 942.334623] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.343652] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925736, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.457496] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52892ee6-22a9-ba0d-fc11-43e4dc75c60c, 'name': SearchDatastore_Task, 'duration_secs': 0.009549} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.457496] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.457496] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 15595947-b944-4c82-90ae-883ed951c909/15595947-b944-4c82-90ae-883ed951c909.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 942.457496] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-237856c2-ce22-40e8-a115-43fe56e81745 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.464222] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 942.464222] env[69994]: value = "task-2925737" [ 942.464222] env[69994]: _type = "Task" [ 942.464222] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.472479] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925737, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.703555] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.704492] env[69994]: DEBUG nova.compute.manager [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 942.707559] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.980s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.711797] env[69994]: INFO nova.compute.claims [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 942.831356] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.851805] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925736, 'name': ReconfigVM_Task, 'duration_secs': 0.272508} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.851805] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 83cef95b-99a5-4e6e-8258-79b380b595b3/83cef95b-99a5-4e6e-8258-79b380b595b3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 942.851805] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36f54d88-b1fe-4ee5-886b-b21e898e3e88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.861697] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 942.861697] env[69994]: value = "task-2925738" [ 942.861697] env[69994]: _type = "Task" [ 942.861697] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.876822] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925738, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.974506] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925737, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.434613} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.974779] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 15595947-b944-4c82-90ae-883ed951c909/15595947-b944-4c82-90ae-883ed951c909.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 942.975022] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 942.975274] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dfadd302-5d0b-44f2-bf60-ec0633ebcd70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.981942] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 942.981942] env[69994]: value = "task-2925739" [ 942.981942] env[69994]: _type = "Task" [ 942.981942] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.990770] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925739, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.060015] env[69994]: DEBUG nova.compute.manager [req-b3072972-2275-4dbd-b7a8-f6f51f0c33e3 req-361f0359-7384-4b88-86db-286fa9ed438f service nova] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Received event network-vif-deleted-695f64c6-49fa-4348-bb2b-910d7a8546e6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 943.213914] env[69994]: DEBUG nova.compute.utils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 943.217383] env[69994]: DEBUG nova.compute.manager [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 943.221784] env[69994]: DEBUG nova.network.neutron [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 943.292289] env[69994]: DEBUG nova.policy [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0d877df23ce4955afb820954028bd8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f9571af19464ad18f6dea790a23bbcd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 943.373335] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925738, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.491946] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925739, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060826} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.492291] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 943.493106] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d07f2d4-db6d-43de-a364-734bccb794d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.517730] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 15595947-b944-4c82-90ae-883ed951c909/15595947-b944-4c82-90ae-883ed951c909.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 943.518039] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10dc010e-5cf6-49dc-ad55-0f99abbca01c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.538105] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 943.538105] env[69994]: value = "task-2925740" [ 943.538105] env[69994]: _type = "Task" [ 943.538105] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.546867] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925740, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.621989] env[69994]: DEBUG nova.network.neutron [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Successfully created port: b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 943.726901] env[69994]: DEBUG nova.compute.manager [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 943.876327] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925738, 'name': Rename_Task, 'duration_secs': 0.835579} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.876327] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 943.876523] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b049b9e-9e01-41c7-b4b4-5911b46137e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.883177] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 943.883177] env[69994]: value = "task-2925741" [ 943.883177] env[69994]: _type = "Task" [ 943.883177] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.894745] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925741, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.048885] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925740, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.050527] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae67ef9f-d21e-425b-bd62-712f7c5c9671 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.057909] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69279b7-a582-4122-be92-47ab8bdfc800 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.091488] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a099bdf-83d6-417a-93ce-0048f5bd7dde {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.101393] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ae1101-084d-4cee-b284-f62efe42ab18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.116162] env[69994]: DEBUG nova.compute.provider_tree [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.393746] env[69994]: DEBUG oslo_vmware.api [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925741, 'name': PowerOnVM_Task, 'duration_secs': 0.487094} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.394122] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 944.394354] env[69994]: INFO nova.compute.manager [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Took 8.55 seconds to spawn the instance on the hypervisor. [ 944.394541] env[69994]: DEBUG nova.compute.manager [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 944.395343] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac283fd-45cf-44b7-b91c-3b24b7064eb9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.549816] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925740, 'name': ReconfigVM_Task, 'duration_secs': 0.620538} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.550121] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 15595947-b944-4c82-90ae-883ed951c909/15595947-b944-4c82-90ae-883ed951c909.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.550807] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a96e61e-304d-4683-8084-e5eee260e7e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.558054] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 944.558054] env[69994]: value = "task-2925742" [ 944.558054] env[69994]: _type = "Task" [ 944.558054] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.567156] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925742, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.620807] env[69994]: DEBUG nova.scheduler.client.report [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 944.739731] env[69994]: DEBUG nova.compute.manager [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 944.763024] env[69994]: DEBUG nova.virt.hardware [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 944.763213] env[69994]: DEBUG nova.virt.hardware [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 944.763380] env[69994]: DEBUG nova.virt.hardware [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 944.763546] env[69994]: DEBUG nova.virt.hardware [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 944.763756] env[69994]: DEBUG nova.virt.hardware [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 944.763948] env[69994]: DEBUG nova.virt.hardware [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 944.764181] env[69994]: DEBUG nova.virt.hardware [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 944.764346] env[69994]: DEBUG nova.virt.hardware [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 944.764548] env[69994]: DEBUG nova.virt.hardware [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 944.764669] env[69994]: DEBUG nova.virt.hardware [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 944.764848] env[69994]: DEBUG nova.virt.hardware [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 944.765723] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f760e0-7982-489a-bdfe-7137df6f543e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.775064] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eceaf61-b739-4303-9a33-2f9e73e49fae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.912788] env[69994]: INFO nova.compute.manager [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Took 20.74 seconds to build instance. [ 945.070591] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925742, 'name': Rename_Task, 'duration_secs': 0.156928} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.070926] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.071198] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8580b0d9-7237-43c2-9dd6-2277a8ae8658 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.078368] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 945.078368] env[69994]: value = "task-2925743" [ 945.078368] env[69994]: _type = "Task" [ 945.078368] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.086820] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925743, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.093725] env[69994]: DEBUG nova.compute.manager [req-cde95142-8e5e-4080-8cec-1ffba2ecbcfe req-c09e8a6d-9a4c-4585-8fe1-aa32393d8edc service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Received event network-vif-plugged-b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 945.093943] env[69994]: DEBUG oslo_concurrency.lockutils [req-cde95142-8e5e-4080-8cec-1ffba2ecbcfe req-c09e8a6d-9a4c-4585-8fe1-aa32393d8edc service nova] Acquiring lock "660277f8-a7ff-43a9-8068-15e3db5a1069-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.094212] env[69994]: DEBUG oslo_concurrency.lockutils [req-cde95142-8e5e-4080-8cec-1ffba2ecbcfe req-c09e8a6d-9a4c-4585-8fe1-aa32393d8edc service nova] Lock "660277f8-a7ff-43a9-8068-15e3db5a1069-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.094390] env[69994]: DEBUG oslo_concurrency.lockutils [req-cde95142-8e5e-4080-8cec-1ffba2ecbcfe req-c09e8a6d-9a4c-4585-8fe1-aa32393d8edc service nova] Lock "660277f8-a7ff-43a9-8068-15e3db5a1069-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.094600] env[69994]: DEBUG nova.compute.manager [req-cde95142-8e5e-4080-8cec-1ffba2ecbcfe req-c09e8a6d-9a4c-4585-8fe1-aa32393d8edc service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] No waiting events found dispatching network-vif-plugged-b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 945.094748] env[69994]: WARNING nova.compute.manager [req-cde95142-8e5e-4080-8cec-1ffba2ecbcfe req-c09e8a6d-9a4c-4585-8fe1-aa32393d8edc service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Received unexpected event network-vif-plugged-b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b for instance with vm_state building and task_state spawning. [ 945.126489] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.419s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.127033] env[69994]: DEBUG nova.compute.manager [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 945.133027] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.058s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.133027] env[69994]: INFO nova.compute.claims [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 945.182891] env[69994]: DEBUG nova.network.neutron [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Successfully updated port: b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.415136] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af336e0d-b05e-4ded-8b3e-f84f7f7bf18c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "83cef95b-99a5-4e6e-8258-79b380b595b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.093s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.590030] env[69994]: DEBUG oslo_vmware.api [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925743, 'name': PowerOnVM_Task, 'duration_secs': 0.427804} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.590186] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 945.590362] env[69994]: INFO nova.compute.manager [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Took 5.32 seconds to spawn the instance on the hypervisor. 
[ 945.590594] env[69994]: DEBUG nova.compute.manager [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 945.591442] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adfb163-e51e-4475-87ee-682d921c16f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.635884] env[69994]: DEBUG nova.compute.utils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 945.639847] env[69994]: DEBUG nova.compute.manager [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 945.639847] env[69994]: DEBUG nova.network.neutron [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 945.663859] env[69994]: INFO nova.compute.manager [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Rebuilding instance [ 945.689485] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Acquiring lock "refresh_cache-660277f8-a7ff-43a9-8068-15e3db5a1069" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.689485] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Acquired lock "refresh_cache-660277f8-a7ff-43a9-8068-15e3db5a1069" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 945.689485] env[69994]: DEBUG nova.network.neutron [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 945.697622] env[69994]: DEBUG nova.policy [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffbbc2ca53924b3c88c6c7e80991dbbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f637f43fc4246aaa5cbbbbbf8e04389', 'project_domain_id': 
'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 945.710816] env[69994]: DEBUG nova.compute.manager [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 945.711679] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c67c565-0923-4a0f-bc3e-c30952217093 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.973686] env[69994]: DEBUG nova.network.neutron [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Successfully created port: 29b654a3-e8bd-4fa0-a914-d1f1ebb404f7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 946.109352] env[69994]: INFO nova.compute.manager [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Took 16.54 seconds to build instance. [ 946.143166] env[69994]: DEBUG nova.compute.manager [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 946.226699] env[69994]: DEBUG nova.network.neutron [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 946.408879] env[69994]: DEBUG nova.network.neutron [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Updating instance_info_cache with network_info: [{"id": "b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b", "address": "fa:16:3e:be:b1:86", "network": {"id": "0e81e6b3-a1fc-4df2-8a74-cf487e99419a", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1345243400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f9571af19464ad18f6dea790a23bbcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c979f78-8597-41f8-b1de-995014032689", "external-id": "nsx-vlan-transportzone-477", "segmentation_id": 477, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb852a2dc-fa", "ovs_interfaceid": "b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.511742] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce4838e-f538-49fe-bdd3-74de0c8ba0c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.519984] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95b8b10-d2cc-42a0-b249-735f57f1ed93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.550106] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4591ef6-b9f0-4adf-9c5c-53ea53197c19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.558388] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fef258a-2eaa-4e17-9c2a-3b38016a6f63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.572623] env[69994]: DEBUG nova.compute.provider_tree [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.612708] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e6590a-c7de-43bf-ac82-8bbd8adf0a3a tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Lock "15595947-b944-4c82-90ae-883ed951c909" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.053s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.729882] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 946.730270] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93076fbc-0555-4292-ada0-937bc23367c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.738959] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 946.738959] env[69994]: value = "task-2925744" [ 946.738959] env[69994]: _type = "Task" [ 946.738959] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.748038] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925744, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.916781] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Releasing lock "refresh_cache-660277f8-a7ff-43a9-8068-15e3db5a1069" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.917231] env[69994]: DEBUG nova.compute.manager [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Instance network_info: |[{"id": "b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b", "address": "fa:16:3e:be:b1:86", "network": {"id": "0e81e6b3-a1fc-4df2-8a74-cf487e99419a", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1345243400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f9571af19464ad18f6dea790a23bbcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c979f78-8597-41f8-b1de-995014032689", "external-id": "nsx-vlan-transportzone-477", "segmentation_id": 477, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb852a2dc-fa", "ovs_interfaceid": "b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 946.917781] env[69994]: DEBUG nova.virt.vmwareapi.vmops 
[None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:b1:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8c979f78-8597-41f8-b1de-995014032689', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 946.925999] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Creating folder: Project (6f9571af19464ad18f6dea790a23bbcd). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 946.926369] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b612559e-e16e-4cb5-8de9-662921d20bde {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.938563] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Created folder: Project (6f9571af19464ad18f6dea790a23bbcd) in parent group-v587342. [ 946.938745] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Creating folder: Instances. Parent ref: group-v587533. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 946.938931] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43486832-5fda-473d-8e4c-507fa89dcbba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.949044] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Created folder: Instances in parent group-v587533. [ 946.949331] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.949552] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 946.949796] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6eb8e7e6-06a6-4749-92a5-13992d992bfa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.969168] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 946.969168] env[69994]: value = "task-2925747" [ 946.969168] env[69994]: _type = "Task" [ 946.969168] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.978369] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925747, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.075621] env[69994]: DEBUG nova.scheduler.client.report [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 947.120451] env[69994]: DEBUG nova.compute.manager [req-dbd73fbb-a519-4222-b9a8-0b2d386ce2e5 req-59e680cc-8436-4297-9962-712dc4f0f6b2 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Received event network-changed-b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 947.120706] env[69994]: DEBUG nova.compute.manager [req-dbd73fbb-a519-4222-b9a8-0b2d386ce2e5 req-59e680cc-8436-4297-9962-712dc4f0f6b2 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Refreshing instance network info cache due to event network-changed-b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 947.120941] env[69994]: DEBUG oslo_concurrency.lockutils [req-dbd73fbb-a519-4222-b9a8-0b2d386ce2e5 req-59e680cc-8436-4297-9962-712dc4f0f6b2 service nova] Acquiring lock "refresh_cache-660277f8-a7ff-43a9-8068-15e3db5a1069" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.121095] env[69994]: DEBUG oslo_concurrency.lockutils [req-dbd73fbb-a519-4222-b9a8-0b2d386ce2e5 req-59e680cc-8436-4297-9962-712dc4f0f6b2 service nova] Acquired lock "refresh_cache-660277f8-a7ff-43a9-8068-15e3db5a1069" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.121257] env[69994]: DEBUG nova.network.neutron [req-dbd73fbb-a519-4222-b9a8-0b2d386ce2e5 req-59e680cc-8436-4297-9962-712dc4f0f6b2 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Refreshing network info cache for port b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 947.153440] env[69994]: DEBUG nova.compute.manager [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 947.156264] env[69994]: INFO nova.compute.manager [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Rebuilding instance [ 947.175051] env[69994]: DEBUG nova.virt.hardware [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 947.175319] env[69994]: DEBUG nova.virt.hardware [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 947.175478] env[69994]: DEBUG nova.virt.hardware [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 947.175657] env[69994]: DEBUG nova.virt.hardware [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 947.175803] env[69994]: DEBUG nova.virt.hardware [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 947.175951] env[69994]: DEBUG nova.virt.hardware [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 947.176176] env[69994]: DEBUG nova.virt.hardware [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 947.176339] env[69994]: DEBUG nova.virt.hardware [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 
tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 947.176522] env[69994]: DEBUG nova.virt.hardware [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 947.176670] env[69994]: DEBUG nova.virt.hardware [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 947.176844] env[69994]: DEBUG nova.virt.hardware [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 947.178764] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d008db86-5d63-4474-a0e5-367e28513a47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.190722] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48b48c3-40a1-45ce-a4fc-eee37953c959 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.212837] env[69994]: DEBUG nova.compute.manager [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.213649] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed93a6b0-3d2e-44fb-b709-6cbde24d4bb2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.248907] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925744, 'name': PowerOffVM_Task, 'duration_secs': 0.267623} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.249184] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 947.249423] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 947.250212] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90d3714-077e-4b05-8692-499ffff070a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.257824] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 947.258016] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-267e34ff-5c86-46bc-b586-8ea6c59cf2fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.463606] env[69994]: DEBUG nova.network.neutron [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Successfully updated port: 29b654a3-e8bd-4fa0-a914-d1f1ebb404f7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 947.480362] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925747, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.580481] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.451s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.581123] env[69994]: DEBUG nova.compute.manager [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 947.586033] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.075s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.586830] env[69994]: DEBUG nova.objects.instance [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lazy-loading 'resources' on Instance uuid b4c6b628-426e-4efc-b8b6-0c2937ef6df3 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 947.833278] env[69994]: DEBUG nova.network.neutron [req-dbd73fbb-a519-4222-b9a8-0b2d386ce2e5 req-59e680cc-8436-4297-9962-712dc4f0f6b2 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Updated VIF entry in instance network info cache for port b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 947.833657] env[69994]: DEBUG nova.network.neutron [req-dbd73fbb-a519-4222-b9a8-0b2d386ce2e5 req-59e680cc-8436-4297-9962-712dc4f0f6b2 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Updating instance_info_cache with network_info: [{"id": "b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b", "address": "fa:16:3e:be:b1:86", "network": {"id": "0e81e6b3-a1fc-4df2-8a74-cf487e99419a", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1345243400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f9571af19464ad18f6dea790a23bbcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c979f78-8597-41f8-b1de-995014032689", "external-id": "nsx-vlan-transportzone-477", "segmentation_id": 477, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb852a2dc-fa", "ovs_interfaceid": "b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.909773] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 947.910038] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 947.910516] env[69994]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleting the datastore file [datastore2] 83cef95b-99a5-4e6e-8258-79b380b595b3 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 947.910598] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5953bd27-5a1d-4911-9963-ffada1f8a39a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.917214] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 947.917214] env[69994]: value = "task-2925749" [ 947.917214] env[69994]: _type = "Task" [ 947.917214] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.925862] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925749, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.967670] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Acquiring lock "refresh_cache-1e19dc4d-c3dd-41e7-819f-30d54cb1390e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.967817] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Acquired lock "refresh_cache-1e19dc4d-c3dd-41e7-819f-30d54cb1390e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.967982] env[69994]: DEBUG nova.network.neutron [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 947.980239] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925747, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.090020] env[69994]: DEBUG nova.compute.utils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 948.094416] env[69994]: DEBUG nova.compute.manager [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 948.094587] env[69994]: DEBUG nova.network.neutron [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 948.135119] env[69994]: DEBUG nova.policy [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '550fe2bfeab14f0fa409c65d98954e7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21bf4c6f3b2c45218949b0e6c1eb84fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 948.226901] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 948.227214] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26729eb6-fdd1-4ffc-96f9-b6ecfbda6129 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.236887] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 948.236887] env[69994]: value = "task-2925750" [ 948.236887] env[69994]: _type = "Task" [ 948.236887] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.246361] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925750, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.337211] env[69994]: DEBUG oslo_concurrency.lockutils [req-dbd73fbb-a519-4222-b9a8-0b2d386ce2e5 req-59e680cc-8436-4297-9962-712dc4f0f6b2 service nova] Releasing lock "refresh_cache-660277f8-a7ff-43a9-8068-15e3db5a1069" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.371914] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bf1c92-ba82-482a-8745-83002d44bf1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.380055] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4b2509-f17c-4f19-a770-f3c8c0c1297a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.413519] env[69994]: DEBUG nova.network.neutron [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Successfully created port: 43ef7f11-6496-44e9-a438-979f2407ad8b {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 948.415921] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a1eaab-18e0-4769-b661-d7db2261d2d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.429916] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23157a3c-4548-48ec-af65-3e10f21bcc07 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.433609] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925749, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153915} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.433842] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 948.434028] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 948.434213] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 948.447471] env[69994]: DEBUG nova.compute.provider_tree [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.483913] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925747, 'name': CreateVM_Task, 'duration_secs': 1.019369} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.484442] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.485312] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.485440] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.485934] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 948.486356] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ca749f4-0afc-4c0b-8508-2b73d705f57e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.491452] env[69994]: 
DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Waiting for the task: (returnval){ [ 948.491452] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527d3c61-a758-ef5a-896a-41001eeaecf5" [ 948.491452] env[69994]: _type = "Task" [ 948.491452] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.500469] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527d3c61-a758-ef5a-896a-41001eeaecf5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.536960] env[69994]: DEBUG nova.network.neutron [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 948.595307] env[69994]: DEBUG nova.compute.manager [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 948.749213] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925750, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.801295] env[69994]: DEBUG nova.network.neutron [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Updating instance_info_cache with network_info: [{"id": "29b654a3-e8bd-4fa0-a914-d1f1ebb404f7", "address": "fa:16:3e:31:2a:91", "network": {"id": "a15116b2-0901-464b-a297-b1d1d990e991", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-2134345382-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f637f43fc4246aaa5cbbbbbf8e04389", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", "external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29b654a3-e8", "ovs_interfaceid": "29b654a3-e8bd-4fa0-a914-d1f1ebb404f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.950334] env[69994]: DEBUG nova.scheduler.client.report [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 949.002982] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527d3c61-a758-ef5a-896a-41001eeaecf5, 'name': SearchDatastore_Task, 'duration_secs': 0.010471} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.003328] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.003711] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 949.003955] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.004117] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.004302] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 949.004848] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80f20e53-06ae-4458-bf08-9815e60a0f1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.013605] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 949.013801] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 949.014559] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5076e2ab-2566-4aec-8d25-70813431aed0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.019574] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Waiting for the task: (returnval){ [ 949.019574] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526b9430-2ef7-7f1f-d7ce-529390ce4aa8" [ 949.019574] env[69994]: _type = "Task" [ 949.019574] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.026891] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526b9430-2ef7-7f1f-d7ce-529390ce4aa8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.164547] env[69994]: DEBUG nova.compute.manager [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Received event network-vif-plugged-29b654a3-e8bd-4fa0-a914-d1f1ebb404f7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 949.164547] env[69994]: DEBUG oslo_concurrency.lockutils [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] Acquiring lock "1e19dc4d-c3dd-41e7-819f-30d54cb1390e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.164982] env[69994]: DEBUG oslo_concurrency.lockutils [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] Lock "1e19dc4d-c3dd-41e7-819f-30d54cb1390e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.164982] env[69994]: DEBUG oslo_concurrency.lockutils [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] Lock "1e19dc4d-c3dd-41e7-819f-30d54cb1390e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.165204] env[69994]: DEBUG nova.compute.manager [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] No waiting events found dispatching network-vif-plugged-29b654a3-e8bd-4fa0-a914-d1f1ebb404f7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 949.165279] env[69994]: WARNING nova.compute.manager [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Received unexpected event 
network-vif-plugged-29b654a3-e8bd-4fa0-a914-d1f1ebb404f7 for instance with vm_state building and task_state spawning. [ 949.166022] env[69994]: DEBUG nova.compute.manager [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Received event network-changed-29b654a3-e8bd-4fa0-a914-d1f1ebb404f7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 949.166022] env[69994]: DEBUG nova.compute.manager [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Refreshing instance network info cache due to event network-changed-29b654a3-e8bd-4fa0-a914-d1f1ebb404f7. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 949.166022] env[69994]: DEBUG oslo_concurrency.lockutils [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] Acquiring lock "refresh_cache-1e19dc4d-c3dd-41e7-819f-30d54cb1390e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.251525] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925750, 'name': PowerOffVM_Task, 'duration_secs': 0.833242} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.251809] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 949.252110] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 949.252874] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5f7120-1e94-4d48-b1e0-35fcc0891906 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.259489] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 949.259707] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af8b18a9-7e3c-4359-aef5-073797752dfc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.284461] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 949.284736] env[69994]: DEBUG 
nova.virt.vmwareapi.vmops [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 949.284946] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Deleting the datastore file [datastore1] 15595947-b944-4c82-90ae-883ed951c909 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 949.285215] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4426dfb-e6ee-4ee7-a475-f66c395497bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.291060] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 949.291060] env[69994]: value = "task-2925752" [ 949.291060] env[69994]: _type = "Task" [ 949.291060] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.298726] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925752, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.304314] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Releasing lock "refresh_cache-1e19dc4d-c3dd-41e7-819f-30d54cb1390e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.304666] env[69994]: DEBUG nova.compute.manager [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Instance network_info: |[{"id": "29b654a3-e8bd-4fa0-a914-d1f1ebb404f7", "address": "fa:16:3e:31:2a:91", "network": {"id": "a15116b2-0901-464b-a297-b1d1d990e991", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-2134345382-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f637f43fc4246aaa5cbbbbbf8e04389", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", "external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29b654a3-e8", "ovs_interfaceid": "29b654a3-e8bd-4fa0-a914-d1f1ebb404f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 949.305016] env[69994]: DEBUG oslo_concurrency.lockutils [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] Acquired lock "refresh_cache-1e19dc4d-c3dd-41e7-819f-30d54cb1390e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.305268] env[69994]: DEBUG nova.network.neutron [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Refreshing network info cache for port 29b654a3-e8bd-4fa0-a914-d1f1ebb404f7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.308051] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:2a:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a58387dd-f438-4913-af6a-fafb734cd881', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29b654a3-e8bd-4fa0-a914-d1f1ebb404f7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.315529] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Creating folder: Project (6f637f43fc4246aaa5cbbbbbf8e04389). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 949.316582] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11591cb9-a104-488e-a4c9-a009c916d826 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.326762] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Created folder: Project (6f637f43fc4246aaa5cbbbbbf8e04389) in parent group-v587342. [ 949.326942] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Creating folder: Instances. Parent ref: group-v587536. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 949.327176] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53261eb1-3405-490f-b60e-7d46d39bc04f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.338186] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Created folder: Instances in parent group-v587536. [ 949.338401] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 949.338616] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 949.338799] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-239d12ac-02a9-4f8d-94ab-4a8689faf7d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.356864] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.356864] env[69994]: value = "task-2925755" [ 949.356864] env[69994]: _type = "Task" [ 949.356864] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.363937] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925755, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.454897] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.869s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.457666] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.454s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.459359] env[69994]: INFO nova.compute.claims [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 949.470519] env[69994]: DEBUG nova.virt.hardware [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 949.470757] env[69994]: DEBUG nova.virt.hardware [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] 
Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 949.470918] env[69994]: DEBUG nova.virt.hardware [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 949.471113] env[69994]: DEBUG nova.virt.hardware [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 949.471265] env[69994]: DEBUG nova.virt.hardware [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 949.471412] env[69994]: DEBUG nova.virt.hardware [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 949.471617] env[69994]: DEBUG nova.virt.hardware [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 949.471807] env[69994]: DEBUG nova.virt.hardware [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 949.472130] env[69994]: DEBUG nova.virt.hardware [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 949.472258] env[69994]: DEBUG nova.virt.hardware [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 949.472438] env[69994]: DEBUG nova.virt.hardware [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 949.473297] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77318b92-6c8e-4b3b-b5eb-1a339f6d79dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.481243] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f6c8eeff-af3e-4b26-978d-db0631f1da31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.486298] env[69994]: INFO nova.scheduler.client.report [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleted allocations for instance b4c6b628-426e-4efc-b8b6-0c2937ef6df3 [ 949.495631] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:dd:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2bda4ed5-72d2-44ec-bf8e-43efc8fab0db', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.503668] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 949.506744] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 949.507302] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74e820a5-68c1-44b9-8983-1f044d9abd9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.531413] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526b9430-2ef7-7f1f-d7ce-529390ce4aa8, 'name': SearchDatastore_Task, 'duration_secs': 0.008662} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.533105] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.533105] env[69994]: value = "task-2925756" [ 949.533105] env[69994]: _type = "Task" [ 949.533105] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.533306] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d463693b-6be9-426b-81b7-883dc627b663 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.541652] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Waiting for the task: (returnval){ [ 949.541652] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5266396e-9bf6-e464-e7bd-9c345a4f8631" [ 949.541652] env[69994]: _type = "Task" [ 949.541652] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.544786] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925756, 'name': CreateVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.553087] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5266396e-9bf6-e464-e7bd-9c345a4f8631, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.604680] env[69994]: DEBUG nova.compute.manager [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 949.633075] env[69994]: DEBUG nova.virt.hardware [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 949.633380] env[69994]: DEBUG nova.virt.hardware [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 949.633833] env[69994]: DEBUG nova.virt.hardware [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 949.633833] env[69994]: DEBUG nova.virt.hardware [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 949.633974] env[69994]: DEBUG nova.virt.hardware [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 949.634258] env[69994]: DEBUG nova.virt.hardware [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 949.634533] env[69994]: DEBUG nova.virt.hardware [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 949.634747] env[69994]: DEBUG nova.virt.hardware [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 949.634940] env[69994]: DEBUG nova.virt.hardware [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 949.635123] env[69994]: DEBUG nova.virt.hardware [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 949.635304] env[69994]: DEBUG nova.virt.hardware [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 949.636253] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a77f45-2014-41c7-a761-0b99cf9cdcbc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.647044] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c6aa0a-cc67-424e-9b2d-a28b9f6aec72 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.801652] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925752, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089715} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.802339] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.802339] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.802465] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.868176] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925755, 'name': CreateVM_Task, 'duration_secs': 0.352509} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.869041] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 949.869151] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.869774] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.869774] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 949.870017] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07d4b38b-9892-42e0-b976-2d19ee0e1737 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.875102] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Waiting for the task: (returnval){ [ 949.875102] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52760594-3f29-9b67-3a35-bc945684b870" [ 949.875102] env[69994]: _type = "Task" [ 949.875102] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.883299] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52760594-3f29-9b67-3a35-bc945684b870, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.010811] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1c531023-d030-4ddc-b192-a3d82589260b tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "b4c6b628-426e-4efc-b8b6-0c2937ef6df3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.043s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.045696] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925756, 'name': CreateVM_Task, 'duration_secs': 0.355342} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.049018] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 950.049926] env[69994]: DEBUG oslo_concurrency.lockutils [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.055534] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5266396e-9bf6-e464-e7bd-9c345a4f8631, 'name': SearchDatastore_Task, 'duration_secs': 0.011177} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.055780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.056038] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 660277f8-a7ff-43a9-8068-15e3db5a1069/660277f8-a7ff-43a9-8068-15e3db5a1069.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 950.056294] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1b9c66e-39d5-4a1d-af68-7f69dda05e6d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.063151] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Waiting for the task: (returnval){ [ 950.063151] env[69994]: value = "task-2925757" [ 950.063151] env[69994]: _type = "Task" [ 950.063151] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.078030] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925757, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.391429] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52760594-3f29-9b67-3a35-bc945684b870, 'name': SearchDatastore_Task, 'duration_secs': 0.009584} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.392261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.392692] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 950.393156] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.393477] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.393852] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 950.397020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.397020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 950.397020] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-121195c6-c352-419f-b2c4-6e9107bee2d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.397623] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9492b36-9c2b-4c3b-96e7-a7887af563b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.405054] env[69994]: DEBUG oslo_vmware.api [None 
req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 950.405054] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5236e7b5-3c37-72ba-3ec2-72deed5bace9" [ 950.405054] env[69994]: _type = "Task" [ 950.405054] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.416166] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5236e7b5-3c37-72ba-3ec2-72deed5bace9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.418146] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 950.418465] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 950.419315] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa55a60d-fbdc-452e-b2c0-3cefc2b6ad0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.426080] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Waiting for the task: (returnval){ [ 950.426080] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525c78b4-3700-6638-930d-abdc87481105" [ 950.426080] env[69994]: _type = "Task" [ 950.426080] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.430715] env[69994]: DEBUG nova.network.neutron [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Updated VIF entry in instance network info cache for port 29b654a3-e8bd-4fa0-a914-d1f1ebb404f7. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.431195] env[69994]: DEBUG nova.network.neutron [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Updating instance_info_cache with network_info: [{"id": "29b654a3-e8bd-4fa0-a914-d1f1ebb404f7", "address": "fa:16:3e:31:2a:91", "network": {"id": "a15116b2-0901-464b-a297-b1d1d990e991", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-2134345382-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f637f43fc4246aaa5cbbbbbf8e04389", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", "external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29b654a3-e8", "ovs_interfaceid": "29b654a3-e8bd-4fa0-a914-d1f1ebb404f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.440519] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525c78b4-3700-6638-930d-abdc87481105, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.576968] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925757, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464403} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.576968] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 660277f8-a7ff-43a9-8068-15e3db5a1069/660277f8-a7ff-43a9-8068-15e3db5a1069.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 950.576968] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.577221] env[69994]: DEBUG nova.network.neutron [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Successfully updated port: 43ef7f11-6496-44e9-a438-979f2407ad8b {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 950.578202] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-91859ae1-d2b3-42d4-9f66-ab48a7fcffac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.589438] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Waiting for the task: (returnval){ [ 950.589438] env[69994]: value = "task-2925758" [ 950.589438] env[69994]: _type = "Task" [ 950.589438] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.601115] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925758, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.741226] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06405e2b-2f7e-4240-a850-41125bba6539 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.748866] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107bc002-2045-4e36-8c1a-917990e84ddf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.779133] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64db5666-b676-4585-8087-fb46471d0f9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.786504] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6382256-c64d-44c5-92b2-38a377f2a3ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.799842] env[69994]: DEBUG nova.compute.provider_tree [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.831810] env[69994]: DEBUG nova.virt.hardware [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 950.832073] env[69994]: DEBUG nova.virt.hardware [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 950.832239] env[69994]: DEBUG nova.virt.hardware [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 950.832421] env[69994]: DEBUG nova.virt.hardware [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 950.832567] env[69994]: DEBUG 
nova.virt.hardware [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 950.832712] env[69994]: DEBUG nova.virt.hardware [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 950.832920] env[69994]: DEBUG nova.virt.hardware [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 950.833095] env[69994]: DEBUG nova.virt.hardware [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 950.833265] env[69994]: DEBUG nova.virt.hardware [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 950.833426] env[69994]: DEBUG nova.virt.hardware [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 950.833595] env[69994]: DEBUG nova.virt.hardware [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 950.834444] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a2d2e5-6aa1-4d37-93d8-718d7a5afab0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.842022] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2144a374-4d4b-4662-943c-d6b72bcd0e52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.856549] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 950.862233] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 950.862568] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15595947-b944-4c82-90ae-883ed951c909] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 950.862709] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1a135c5-4437-46ab-9a80-1b00098e84c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.878662] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 950.878662] env[69994]: value = "task-2925759" [ 950.878662] env[69994]: _type = "Task" [ 950.878662] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.886014] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925759, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.916273] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5236e7b5-3c37-72ba-3ec2-72deed5bace9, 'name': SearchDatastore_Task, 'duration_secs': 0.063026} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.917032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.917032] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 950.917117] env[69994]: DEBUG oslo_concurrency.lockutils [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.936029] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525c78b4-3700-6638-930d-abdc87481105, 'name': SearchDatastore_Task, 'duration_secs': 0.044118} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.937073] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07fccd43-8c7b-4ac5-afd1-ca3a17df56e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.941506] env[69994]: DEBUG oslo_concurrency.lockutils [req-47117422-6052-46a0-9510-88ea194d35bb req-df9896cd-9b35-4a46-a153-9a37bed83820 service nova] Releasing lock "refresh_cache-1e19dc4d-c3dd-41e7-819f-30d54cb1390e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.943454] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Waiting for the task: (returnval){ [ 950.943454] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52732579-6d96-f121-069b-f2e8894ab6f1" [ 950.943454] env[69994]: _type = "Task" [ 950.943454] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.951604] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52732579-6d96-f121-069b-f2e8894ab6f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.084207] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "refresh_cache-ee68a538-d803-4bd6-9117-b021b28da899" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.084368] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "refresh_cache-ee68a538-d803-4bd6-9117-b021b28da899" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.084533] env[69994]: DEBUG nova.network.neutron [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 951.099763] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925758, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060623} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.100049] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 951.100822] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd3056f-8ef7-4891-bdbf-f525945d2e03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.124445] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 660277f8-a7ff-43a9-8068-15e3db5a1069/660277f8-a7ff-43a9-8068-15e3db5a1069.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 951.124755] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-376971a0-62d7-4f1d-9a96-0aba10e8333f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.144760] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Waiting for the task: (returnval){ [ 951.144760] env[69994]: value = "task-2925760" [ 951.144760] env[69994]: _type = "Task" [ 951.144760] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.152940] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925760, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.163907] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "76dbf172-10b2-4439-9d2a-8226ba46062d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.164288] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "76dbf172-10b2-4439-9d2a-8226ba46062d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.193172] env[69994]: DEBUG nova.compute.manager [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Received event network-vif-plugged-43ef7f11-6496-44e9-a438-979f2407ad8b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 951.193514] env[69994]: DEBUG oslo_concurrency.lockutils [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] Acquiring lock "ee68a538-d803-4bd6-9117-b021b28da899-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.193811] env[69994]: DEBUG oslo_concurrency.lockutils [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] Lock "ee68a538-d803-4bd6-9117-b021b28da899-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.194065] env[69994]: DEBUG oslo_concurrency.lockutils [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] Lock "ee68a538-d803-4bd6-9117-b021b28da899-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.194280] env[69994]: DEBUG nova.compute.manager [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] [instance: ee68a538-d803-4bd6-9117-b021b28da899] No waiting events found dispatching network-vif-plugged-43ef7f11-6496-44e9-a438-979f2407ad8b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 951.194510] env[69994]: WARNING nova.compute.manager [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Received unexpected event network-vif-plugged-43ef7f11-6496-44e9-a438-979f2407ad8b for instance with vm_state building and task_state spawning. 
[ 951.194753] env[69994]: DEBUG nova.compute.manager [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Received event network-changed-43ef7f11-6496-44e9-a438-979f2407ad8b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 951.194979] env[69994]: DEBUG nova.compute.manager [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Refreshing instance network info cache due to event network-changed-43ef7f11-6496-44e9-a438-979f2407ad8b. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 951.195245] env[69994]: DEBUG oslo_concurrency.lockutils [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] Acquiring lock "refresh_cache-ee68a538-d803-4bd6-9117-b021b28da899" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.302573] env[69994]: DEBUG nova.scheduler.client.report [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.380787] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "c98308b3-2431-4f17-9022-bcd9f1e83a35" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.381052] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "c98308b3-2431-4f17-9022-bcd9f1e83a35" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.395298] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925759, 'name': CreateVM_Task, 'duration_secs': 0.435776} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.396087] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15595947-b944-4c82-90ae-883ed951c909] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 951.397092] env[69994]: DEBUG oslo_concurrency.lockutils [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.397092] env[69994]: DEBUG oslo_concurrency.lockutils [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.397216] env[69994]: DEBUG oslo_concurrency.lockutils [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 951.397709] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffde7ebe-f4e7-4691-b00b-ac0d2d6be6ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.403033] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 951.403033] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52399dbf-539c-a829-bd7d-485406c8bc65" [ 951.403033] env[69994]: _type = "Task" [ 951.403033] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.412204] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52399dbf-539c-a829-bd7d-485406c8bc65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.454814] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52732579-6d96-f121-069b-f2e8894ab6f1, 'name': SearchDatastore_Task, 'duration_secs': 0.020782} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.455138] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.455510] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 1e19dc4d-c3dd-41e7-819f-30d54cb1390e/1e19dc4d-c3dd-41e7-819f-30d54cb1390e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 951.455737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.455960] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.457045] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1886010-4d4f-4840-a53b-294449add325 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.458374] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6854de32-8544-4ea4-bebc-dcd5db19a73d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.465464] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Waiting for the task: (returnval){ [ 951.465464] env[69994]: value = "task-2925761" [ 951.465464] env[69994]: _type = "Task" [ 951.465464] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.470458] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.470458] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 951.471603] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc300b90-e4c4-48ea-8a37-05d5616d08e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.477500] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925761, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.480204] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 951.480204] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528b5e9c-50ea-ff19-35ad-124ce413b294" [ 951.480204] env[69994]: _type = "Task" [ 951.480204] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.488103] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528b5e9c-50ea-ff19-35ad-124ce413b294, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.629273] env[69994]: DEBUG nova.network.neutron [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 951.658218] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925760, 'name': ReconfigVM_Task, 'duration_secs': 0.281488} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.658948] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 660277f8-a7ff-43a9-8068-15e3db5a1069/660277f8-a7ff-43a9-8068-15e3db5a1069.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.659682] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0540ef3c-4a6c-4009-b818-ce6dd01f5154 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.667635] env[69994]: DEBUG nova.compute.manager [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 951.670542] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Waiting for the task: (returnval){ [ 951.670542] env[69994]: value = "task-2925762" [ 951.670542] env[69994]: _type = "Task" [ 951.670542] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.681905] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925762, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.808112] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.808522] env[69994]: DEBUG nova.compute.manager [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 951.812582] env[69994]: DEBUG nova.network.neutron [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Updating instance_info_cache with network_info: [{"id": "43ef7f11-6496-44e9-a438-979f2407ad8b", "address": "fa:16:3e:63:37:6c", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ef7f11-64", "ovs_interfaceid": "43ef7f11-6496-44e9-a438-979f2407ad8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.815196] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.347s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.815369] env[69994]: INFO nova.compute.claims [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 951.885454] env[69994]: DEBUG nova.compute.manager [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 951.915416] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52399dbf-539c-a829-bd7d-485406c8bc65, 'name': SearchDatastore_Task, 'duration_secs': 0.009388} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.915959] env[69994]: DEBUG oslo_concurrency.lockutils [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.916318] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 951.916683] env[69994]: DEBUG oslo_concurrency.lockutils [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.975247] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925761, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476172} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.975537] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 1e19dc4d-c3dd-41e7-819f-30d54cb1390e/1e19dc4d-c3dd-41e7-819f-30d54cb1390e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 951.975751] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 951.976010] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8df9480c-1171-47d0-abb3-ef0385cc387d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.986023] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Waiting for the task: (returnval){ [ 951.986023] env[69994]: value = "task-2925763" [ 951.986023] env[69994]: _type = "Task" [ 951.986023] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.993234] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528b5e9c-50ea-ff19-35ad-124ce413b294, 'name': SearchDatastore_Task, 'duration_secs': 0.017903} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.994462] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a506e0c6-2e87-4f9c-8528-109b8c949137 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.999435] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925763, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.002264] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 952.002264] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f4fdf1-e5a5-35f4-793d-d54d59ac178a" [ 952.002264] env[69994]: _type = "Task" [ 952.002264] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.009655] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f4fdf1-e5a5-35f4-793d-d54d59ac178a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.184580] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925762, 'name': Rename_Task, 'duration_secs': 0.232738} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.184853] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 952.185112] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4b0f97b-1118-48bd-ba76-c70c901c10ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.191449] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.192660] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Waiting for the task: (returnval){ [ 952.192660] env[69994]: value = "task-2925764" [ 952.192660] env[69994]: _type = "Task" [ 952.192660] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.199869] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925764, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.318788] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "refresh_cache-ee68a538-d803-4bd6-9117-b021b28da899" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.319229] env[69994]: DEBUG nova.compute.manager [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Instance network_info: |[{"id": "43ef7f11-6496-44e9-a438-979f2407ad8b", "address": "fa:16:3e:63:37:6c", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ef7f11-64", "ovs_interfaceid": "43ef7f11-6496-44e9-a438-979f2407ad8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 952.320645] env[69994]: DEBUG nova.compute.utils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 952.324106] env[69994]: DEBUG oslo_concurrency.lockutils [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] Acquired lock "refresh_cache-ee68a538-d803-4bd6-9117-b021b28da899" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.324229] env[69994]: DEBUG nova.network.neutron [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Refreshing network info cache for port 43ef7f11-6496-44e9-a438-979f2407ad8b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 952.325163] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:37:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04e15990-16e1-4cb2-b0f0-06c362e68c5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43ef7f11-6496-44e9-a438-979f2407ad8b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 952.333515] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 952.333757] env[69994]: DEBUG nova.compute.manager [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 952.333931] env[69994]: DEBUG nova.network.neutron [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 952.337319] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 952.337572] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2da3459a-15b1-4ded-9ec1-826689425cc0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.358622] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 952.358622] env[69994]: value = "task-2925765" [ 952.358622] env[69994]: _type = "Task" [ 952.358622] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.366482] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925765, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.385716] env[69994]: DEBUG nova.policy [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4543702642614e079383389379629d8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0bbe936f4d284e73999846251269fefd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 952.405826] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.496171] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925763, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063382} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.499432] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 952.499432] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0b8e64-44f1-4955-b1eb-c6a0e21d34a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.520549] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 1e19dc4d-c3dd-41e7-819f-30d54cb1390e/1e19dc4d-c3dd-41e7-819f-30d54cb1390e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 952.523904] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd6fa5da-a80b-4526-b290-ef047891dfe5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.544130] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f4fdf1-e5a5-35f4-793d-d54d59ac178a, 'name': SearchDatastore_Task, 'duration_secs': 0.00856} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.545497] env[69994]: DEBUG oslo_concurrency.lockutils [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.545778] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 83cef95b-99a5-4e6e-8258-79b380b595b3/83cef95b-99a5-4e6e-8258-79b380b595b3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 952.546129] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Waiting for the task: (returnval){ [ 952.546129] env[69994]: value = "task-2925766" [ 952.546129] env[69994]: _type = "Task" [ 952.546129] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.546355] env[69994]: DEBUG oslo_concurrency.lockutils [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.546549] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 952.546782] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3d023c5-f71b-4b0b-ac98-7114b8feee10 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.548836] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67ec60be-cb64-4958-85a1-a114ec9a4418 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.558795] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925766, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.560924] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 952.560924] env[69994]: value = "task-2925767" [ 952.560924] env[69994]: _type = "Task" [ 952.560924] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.562341] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 952.562341] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 952.566377] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5af0c7d5-1152-4151-8e3f-d38e77182a6e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.575726] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 952.575726] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e1045b-029d-61a7-b086-6c21a9628d8d" [ 952.575726] env[69994]: _type = "Task" [ 952.575726] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.578799] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925767, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.586640] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e1045b-029d-61a7-b086-6c21a9628d8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.703371] env[69994]: DEBUG oslo_vmware.api [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925764, 'name': PowerOnVM_Task, 'duration_secs': 0.48475} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.703685] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.703893] env[69994]: INFO nova.compute.manager [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Took 7.96 seconds to spawn the instance on the hypervisor. [ 952.704089] env[69994]: DEBUG nova.compute.manager [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 952.704936] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0658888e-09d4-4b79-99e6-2421e3fccd54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.714431] env[69994]: DEBUG nova.network.neutron [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Successfully created port: 5e15c09f-a8a4-4350-aaa1-705508f4deb7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 952.824705] env[69994]: DEBUG nova.compute.manager [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 952.871945] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925765, 'name': CreateVM_Task, 'duration_secs': 0.334695} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.874724] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 952.876017] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.876099] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.876431] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 952.876733] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3a9adbb-944e-43f1-9333-070f07160a5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.881896] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 952.881896] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52034d19-99a5-8dca-2f9a-cadd15cab895" [ 952.881896] env[69994]: _type = "Task" [ 952.881896] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.895350] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52034d19-99a5-8dca-2f9a-cadd15cab895, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.059197] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925766, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.070676] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925767, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.089036] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e1045b-029d-61a7-b086-6c21a9628d8d, 'name': SearchDatastore_Task, 'duration_secs': 0.011621} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.089891] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17771c33-1e17-4ba2-89cf-f8adf2dcd6a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.094952] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 953.094952] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5254c271-2a7a-cd5f-e322-013d3499e706" [ 953.094952] env[69994]: _type = "Task" [ 953.094952] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.107638] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5254c271-2a7a-cd5f-e322-013d3499e706, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.212113] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798e8d01-829e-4693-8db5-6be1e373cbfc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.216566] env[69994]: DEBUG nova.network.neutron [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Updated VIF entry in instance network info cache for port 43ef7f11-6496-44e9-a438-979f2407ad8b. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 953.216940] env[69994]: DEBUG nova.network.neutron [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Updating instance_info_cache with network_info: [{"id": "43ef7f11-6496-44e9-a438-979f2407ad8b", "address": "fa:16:3e:63:37:6c", "network": {"id": "f65b1bee-fee5-4178-a2b2-23f39497424a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-108687217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21bf4c6f3b2c45218949b0e6c1eb84fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ef7f11-64", "ovs_interfaceid": "43ef7f11-6496-44e9-a438-979f2407ad8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.226045] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a0308c-076b-4dd3-9948-600ca7a0e6b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.230958] env[69994]: INFO nova.compute.manager [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Took 22.38 seconds to build instance. 
[ 953.261163] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9be31b-acc9-43c4-93a8-b2a2d2e3788b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.269704] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a7ac11-1213-470f-b07c-61ba5b80b100 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.283642] env[69994]: DEBUG nova.compute.provider_tree [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 953.392399] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52034d19-99a5-8dca-2f9a-cadd15cab895, 'name': SearchDatastore_Task, 'duration_secs': 0.059846} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.392689] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.392917] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 953.393144] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.558783] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925766, 'name': ReconfigVM_Task, 'duration_secs': 0.601145} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.559176] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 1e19dc4d-c3dd-41e7-819f-30d54cb1390e/1e19dc4d-c3dd-41e7-819f-30d54cb1390e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 953.559835] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-096382f3-417c-4642-a58c-62e76b72f354 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.566425] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Waiting for the task: (returnval){ [ 953.566425] env[69994]: value = "task-2925768" [ 953.566425] env[69994]: _type = "Task" [ 953.566425] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.572217] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925767, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531244} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.572722] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 83cef95b-99a5-4e6e-8258-79b380b595b3/83cef95b-99a5-4e6e-8258-79b380b595b3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 953.572931] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 953.573164] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f36ea5d-3bf3-4eea-a9ea-a30123ef07ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.577558] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925768, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.581300] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 953.581300] env[69994]: value = "task-2925769" [ 953.581300] env[69994]: _type = "Task" [ 953.581300] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.589525] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925769, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.606494] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5254c271-2a7a-cd5f-e322-013d3499e706, 'name': SearchDatastore_Task, 'duration_secs': 0.01519} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.606494] env[69994]: DEBUG oslo_concurrency.lockutils [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.606702] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 15595947-b944-4c82-90ae-883ed951c909/15595947-b944-4c82-90ae-883ed951c909.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 953.607030] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.607145] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 953.607364] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8ff1dfe-f302-4e15-9853-6c4ccc417e79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.609331] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e716bb89-43ae-41fd-b9a9-3f7c4ff9fe2b {{(pid=69994) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.615945] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 953.615945] env[69994]: value = "task-2925770" [ 953.615945] env[69994]: _type = "Task" [ 953.615945] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.617256] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 953.617579] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 953.621079] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca3c9842-6a13-45cd-9938-fbf73c9151cc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.628226] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 953.628226] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5221476b-fec8-2375-4b60-a3852d19a6c2" [ 953.628226] env[69994]: _type = "Task" [ 953.628226] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.631283] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925770, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.638921] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5221476b-fec8-2375-4b60-a3852d19a6c2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.724656] env[69994]: DEBUG oslo_concurrency.lockutils [req-f21df0a9-3940-46df-911c-61ae74ded7cf req-c9d24777-c38c-45f8-abb8-7b780de23c1b service nova] Releasing lock "refresh_cache-ee68a538-d803-4bd6-9117-b021b28da899" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.732499] env[69994]: DEBUG oslo_concurrency.lockutils [None req-213532d1-be17-4233-93a3-b3b74df7bbb5 tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Lock "660277f8-a7ff-43a9-8068-15e3db5a1069" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.893s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.806586] env[69994]: ERROR nova.scheduler.client.report [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [req-f64c2678-a8fd-4f87-beee-02744aaf218f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f64c2678-a8fd-4f87-beee-02744aaf218f"}]} [ 953.825057] env[69994]: DEBUG nova.scheduler.client.report [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 953.836899] env[69994]: DEBUG nova.compute.manager [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 953.841532] env[69994]: DEBUG nova.scheduler.client.report [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 953.841715] env[69994]: DEBUG nova.compute.provider_tree [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 953.855128] env[69994]: DEBUG nova.scheduler.client.report [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 953.870657] env[69994]: DEBUG nova.virt.hardware [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 953.870940] env[69994]: DEBUG nova.virt.hardware [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 953.871128] env[69994]: DEBUG nova.virt.hardware [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image 
limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 953.871328] env[69994]: DEBUG nova.virt.hardware [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 953.871477] env[69994]: DEBUG nova.virt.hardware [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 953.871633] env[69994]: DEBUG nova.virt.hardware [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 953.871854] env[69994]: DEBUG nova.virt.hardware [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 953.872117] env[69994]: DEBUG nova.virt.hardware [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 953.872206] env[69994]: DEBUG nova.virt.hardware [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 953.872368] env[69994]: DEBUG nova.virt.hardware [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 953.872557] env[69994]: DEBUG nova.virt.hardware [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 953.873490] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d60ddc-9731-4503-9b05-25ad2e1e0ff2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.882825] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778918dc-ed25-4462-a4f5-8508e5fcfd20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.888143] env[69994]: DEBUG nova.scheduler.client.report [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Refreshing trait associations for resource provider 
2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 954.078616] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925768, 'name': Rename_Task, 'duration_secs': 0.144142} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.078990] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 954.079240] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca47cfaf-b73d-4ccb-b603-53e9a4a303bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.086170] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Waiting for the task: (returnval){ [ 954.086170] env[69994]: value = "task-2925771" [ 954.086170] env[69994]: _type = "Task" [ 954.086170] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.095017] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925769, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065927} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.095278] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 954.096309] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da0b89b-8463-42ac-90dc-4a1c275151ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.101581] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925771, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.121442] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 83cef95b-99a5-4e6e-8258-79b380b595b3/83cef95b-99a5-4e6e-8258-79b380b595b3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.124643] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11cee6ab-f652-4db0-97a1-1f0841ab9871 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.151757] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925770, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476249} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.158697] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 15595947-b944-4c82-90ae-883ed951c909/15595947-b944-4c82-90ae-883ed951c909.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 954.158962] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 954.159272] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5221476b-fec8-2375-4b60-a3852d19a6c2, 'name': SearchDatastore_Task, 'duration_secs': 0.009956} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.159534] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 954.159534] env[69994]: value = "task-2925772" [ 954.159534] env[69994]: _type = "Task" [ 954.159534] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.159944] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71575701-6806-4b28-b183-3fdb722fb1b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.162359] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f60c024f-b200-4a29-82b4-a508fb0f543e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.173674] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925772, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.178917] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 954.178917] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5227cf4c-45c4-4f16-c79c-7083058ddfba" [ 954.178917] env[69994]: _type = "Task" [ 954.178917] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.179220] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 954.179220] env[69994]: value = "task-2925773" [ 954.179220] env[69994]: _type = "Task" [ 954.179220] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.191528] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5227cf4c-45c4-4f16-c79c-7083058ddfba, 'name': SearchDatastore_Task, 'duration_secs': 0.01353} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.196886] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.196886] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] ee68a538-d803-4bd6-9117-b021b28da899/ee68a538-d803-4bd6-9117-b021b28da899.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 954.197164] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925773, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.197580] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-efaaf894-817b-4fb1-a742-0de5b43098d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.203313] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 954.203313] env[69994]: value = "task-2925774" [ 954.203313] env[69994]: _type = "Task" [ 954.203313] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.215197] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925774, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.311131] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a846d4-72eb-46f2-89f4-6c4a32b4d2c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.318950] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7903dc63-96f6-4050-b9f7-ba899ce7ca54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.348828] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d503c0f4-664e-4621-99b4-5cd25c52ab55 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.356502] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f04b58-4afe-47f9-bc39-fcaa5bfccb6d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.369677] env[69994]: DEBUG nova.compute.provider_tree [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 954.536689] env[69994]: DEBUG nova.compute.manager [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Received event network-changed-b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 954.536933] env[69994]: DEBUG nova.compute.manager [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Refreshing instance network info cache due to event network-changed-b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 954.539705] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] Acquiring lock "refresh_cache-660277f8-a7ff-43a9-8068-15e3db5a1069" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.539886] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] Acquired lock "refresh_cache-660277f8-a7ff-43a9-8068-15e3db5a1069" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.540079] env[69994]: DEBUG nova.network.neutron [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Refreshing network info cache for port b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.599479] env[69994]: DEBUG oslo_vmware.api [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925771, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.674466] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925772, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.694442] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925773, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124516} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.694442] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 954.695296] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a64b79-dc60-49cb-9c68-17fe84e06397 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.728131] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 15595947-b944-4c82-90ae-883ed951c909/15595947-b944-4c82-90ae-883ed951c909.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.729499] env[69994]: DEBUG nova.network.neutron [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Successfully updated port: 5e15c09f-a8a4-4350-aaa1-705508f4deb7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 954.735467] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c319bd0-c4e6-4633-8447-b6225a164e24 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.768272] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925774, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523633} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.770055] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] ee68a538-d803-4bd6-9117-b021b28da899/ee68a538-d803-4bd6-9117-b021b28da899.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 954.770363] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 954.770745] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 954.770745] env[69994]: value = "task-2925775" [ 954.770745] env[69994]: _type = "Task" [ 954.770745] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.770947] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e25e62d-52e7-4610-9202-4a547fa36b0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.781654] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925775, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.782949] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 954.782949] env[69994]: value = "task-2925776" [ 954.782949] env[69994]: _type = "Task" [ 954.782949] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.791377] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925776, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.903326] env[69994]: DEBUG nova.scheduler.client.report [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 103 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 954.903634] env[69994]: DEBUG nova.compute.provider_tree [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 103 to 104 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 954.903800] env[69994]: DEBUG nova.compute.provider_tree [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 955.097391] env[69994]: DEBUG oslo_vmware.api [None 
req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925771, 'name': PowerOnVM_Task, 'duration_secs': 0.764635} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.097812] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.097933] env[69994]: INFO nova.compute.manager [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Took 7.94 seconds to spawn the instance on the hypervisor. [ 955.098031] env[69994]: DEBUG nova.compute.manager [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 955.098815] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b39198f-9cb6-4d33-b316-679a4dd06fe0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.174626] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925772, 'name': ReconfigVM_Task, 'duration_secs': 0.607698} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.177732] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 83cef95b-99a5-4e6e-8258-79b380b595b3/83cef95b-99a5-4e6e-8258-79b380b595b3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 955.178832] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ae5491d5-1f3e-45ce-a650-1e579445bc92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.185318] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 955.185318] env[69994]: value = "task-2925777" [ 955.185318] env[69994]: _type = "Task" [ 955.185318] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.193219] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925777, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.262295] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "refresh_cache-f2ae08e9-fbf3-49ab-8290-75f8a53d6030" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.262295] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "refresh_cache-f2ae08e9-fbf3-49ab-8290-75f8a53d6030" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.262295] env[69994]: DEBUG nova.network.neutron [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 955.283419] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925775, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.291133] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925776, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104442} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.291989] env[69994]: DEBUG nova.network.neutron [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Updated VIF entry in instance network info cache for port b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 955.292351] env[69994]: DEBUG nova.network.neutron [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Updating instance_info_cache with network_info: [{"id": "b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b", "address": "fa:16:3e:be:b1:86", "network": {"id": "0e81e6b3-a1fc-4df2-8a74-cf487e99419a", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1345243400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f9571af19464ad18f6dea790a23bbcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c979f78-8597-41f8-b1de-995014032689", "external-id": "nsx-vlan-transportzone-477", "segmentation_id": 477, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb852a2dc-fa", "ovs_interfaceid": "b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.293959] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 955.294862] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab168b5-51bd-474b-bb83-878686628004 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.318864] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] ee68a538-d803-4bd6-9117-b021b28da899/ee68a538-d803-4bd6-9117-b021b28da899.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 955.319306] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9c3a995-3581-4530-8df4-63b90dbd2e88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.338646] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 955.338646] env[69994]: value = "task-2925778" [ 955.338646] env[69994]: _type = "Task" [ 955.338646] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.346801] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925778, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.408459] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.594s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.409179] env[69994]: DEBUG nova.compute.manager [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 955.414375] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.891s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.414597] env[69994]: DEBUG nova.objects.instance [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Lazy-loading 'resources' on Instance uuid 43119e21-5226-482c-b640-33e73051a563 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.617527] env[69994]: INFO nova.compute.manager [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Took 22.91 seconds to build instance. [ 955.695671] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925777, 'name': Rename_Task, 'duration_secs': 0.233956} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.695940] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 955.696188] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b90be17b-840a-42db-b3a0-4c95ef4b8dbb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.703018] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 955.703018] env[69994]: value = "task-2925779" [ 955.703018] env[69994]: _type = "Task" [ 955.703018] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.710968] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925779, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.782043] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925775, 'name': ReconfigVM_Task, 'duration_secs': 0.723747} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.782329] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 15595947-b944-4c82-90ae-883ed951c909/15595947-b944-4c82-90ae-883ed951c909.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 955.782954] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8f7616e-e7e4-464a-bedf-d612bf1f016f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.788735] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 955.788735] env[69994]: value = "task-2925780" [ 955.788735] env[69994]: _type = "Task" [ 955.788735] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.796774] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] Releasing lock "refresh_cache-660277f8-a7ff-43a9-8068-15e3db5a1069" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.797030] env[69994]: DEBUG nova.compute.manager [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Received event network-vif-plugged-5e15c09f-a8a4-4350-aaa1-705508f4deb7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 955.797233] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] Acquiring lock "f2ae08e9-fbf3-49ab-8290-75f8a53d6030-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.797437] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] Lock "f2ae08e9-fbf3-49ab-8290-75f8a53d6030-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.797597] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] Lock "f2ae08e9-fbf3-49ab-8290-75f8a53d6030-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.797762] env[69994]: DEBUG nova.compute.manager [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] No waiting events found dispatching network-vif-plugged-5e15c09f-a8a4-4350-aaa1-705508f4deb7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 955.797934] env[69994]: WARNING nova.compute.manager [req-9b255878-062a-4ec9-9dea-8a727537b1e1 req-8b6648aa-f639-4335-ac57-49cbfd68ab61 service nova] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Received unexpected event network-vif-plugged-5e15c09f-a8a4-4350-aaa1-705508f4deb7 for instance with vm_state building and task_state spawning. [ 955.798287] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925780, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.799057] env[69994]: DEBUG nova.network.neutron [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 955.851279] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925778, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.918085] env[69994]: DEBUG nova.compute.utils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 955.923040] env[69994]: DEBUG nova.compute.manager [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 955.923040] env[69994]: DEBUG nova.network.neutron [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 956.009013] env[69994]: DEBUG nova.network.neutron [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Updating instance_info_cache with network_info: [{"id": "5e15c09f-a8a4-4350-aaa1-705508f4deb7", "address": "fa:16:3e:12:77:5c", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e15c09f-a8", "ovs_interfaceid": "5e15c09f-a8a4-4350-aaa1-705508f4deb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.014011] env[69994]: DEBUG nova.policy [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de3fba71299348fab70f6e21e1028bb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0f5bb040f474df19739d5170639ff67', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 
'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 956.119272] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca65d2c1-4fd7-4cdf-8e3d-6520dc216c67 tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Lock "1e19dc4d-c3dd-41e7-819f-30d54cb1390e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.419s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.213885] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925779, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.246925] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376a407d-849d-4de0-9ce0-1915228e9b0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.255318] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25729af3-98a1-4b52-a53e-4ec0e98f0886 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.288250] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95989883-407c-440f-9b93-8c471ccd0927 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.296371] env[69994]: DEBUG nova.network.neutron [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Successfully created port: 2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 956.303698] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925780, 'name': Rename_Task, 'duration_secs': 0.164355} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.305008] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec591e7d-4222-4b98-bae7-125733f1fd52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.309051] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.309263] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7709ac37-515e-4eda-bfd9-357e4a7ba79e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.321861] env[69994]: DEBUG nova.compute.provider_tree [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.324350] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 956.324350] env[69994]: value = "task-2925781" [ 956.324350] env[69994]: _type = "Task" [ 956.324350] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.332367] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925781, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.350305] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925778, 'name': ReconfigVM_Task, 'duration_secs': 0.595482} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.350597] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Reconfigured VM instance instance-00000048 to attach disk [datastore2] ee68a538-d803-4bd6-9117-b021b28da899/ee68a538-d803-4bd6-9117-b021b28da899.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 956.351327] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-837c5cff-e5b7-4b60-b3aa-e29111ef4ca5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.357891] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 956.357891] env[69994]: value = "task-2925782" [ 956.357891] env[69994]: _type = "Task" [ 956.357891] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.366626] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925782, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.423892] env[69994]: DEBUG nova.compute.manager [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 956.512213] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "refresh_cache-f2ae08e9-fbf3-49ab-8290-75f8a53d6030" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.512694] env[69994]: DEBUG nova.compute.manager [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Instance network_info: |[{"id": "5e15c09f-a8a4-4350-aaa1-705508f4deb7", "address": "fa:16:3e:12:77:5c", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e15c09f-a8", "ovs_interfaceid": "5e15c09f-a8a4-4350-aaa1-705508f4deb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 956.513740] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:77:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e15c09f-a8a4-4350-aaa1-705508f4deb7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 956.522212] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 956.522539] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 956.523163] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3183f1d-6007-4102-a0f1-72a928e9fb18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.544200] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 956.544200] env[69994]: value = "task-2925783" [ 956.544200] env[69994]: _type = "Task" [ 956.544200] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.552330] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925783, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.613709] env[69994]: DEBUG nova.compute.manager [req-6d4ede1c-6642-404d-aaf1-7b2d244f4fba req-b80ceb12-bd41-4846-ba6f-ea2d611ad39a service nova] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Received event network-changed-5e15c09f-a8a4-4350-aaa1-705508f4deb7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 956.613911] env[69994]: DEBUG nova.compute.manager [req-6d4ede1c-6642-404d-aaf1-7b2d244f4fba req-b80ceb12-bd41-4846-ba6f-ea2d611ad39a service nova] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Refreshing instance network info cache due to event network-changed-5e15c09f-a8a4-4350-aaa1-705508f4deb7. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 956.614365] env[69994]: DEBUG oslo_concurrency.lockutils [req-6d4ede1c-6642-404d-aaf1-7b2d244f4fba req-b80ceb12-bd41-4846-ba6f-ea2d611ad39a service nova] Acquiring lock "refresh_cache-f2ae08e9-fbf3-49ab-8290-75f8a53d6030" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.614565] env[69994]: DEBUG oslo_concurrency.lockutils [req-6d4ede1c-6642-404d-aaf1-7b2d244f4fba req-b80ceb12-bd41-4846-ba6f-ea2d611ad39a service nova] Acquired lock "refresh_cache-f2ae08e9-fbf3-49ab-8290-75f8a53d6030" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.614874] env[69994]: DEBUG nova.network.neutron [req-6d4ede1c-6642-404d-aaf1-7b2d244f4fba req-b80ceb12-bd41-4846-ba6f-ea2d611ad39a service nova] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Refreshing network info cache for port 5e15c09f-a8a4-4350-aaa1-705508f4deb7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 956.714988] env[69994]: DEBUG oslo_vmware.api [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925779, 'name': PowerOnVM_Task, 'duration_secs': 0.756645} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.715298] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 956.715509] env[69994]: DEBUG nova.compute.manager [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 956.716362] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11e2562-19f4-4233-a661-4a4fa2355fad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.826033] env[69994]: DEBUG nova.scheduler.client.report [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 956.841860] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925781, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.869123] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925782, 'name': Rename_Task, 'duration_secs': 0.155307} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.869511] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.869842] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08aacb53-c21e-4e77-bd2a-7f4bfe699cb6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.877018] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 956.877018] env[69994]: value = "task-2925784" [ 956.877018] env[69994]: _type = "Task" [ 956.877018] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.888012] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925784, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.054713] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925783, 'name': CreateVM_Task, 'duration_secs': 0.396609} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.055074] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 957.055860] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.055860] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.056193] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 957.056454] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-611af949-6423-481a-93d2-314b74b55e68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.061636] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 957.061636] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52967824-52d1-b293-592e-b8836d6ee049" [ 957.061636] env[69994]: _type = "Task" [ 957.061636] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.069430] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52967824-52d1-b293-592e-b8836d6ee049, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.235604] env[69994]: DEBUG oslo_concurrency.lockutils [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.336610] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.922s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.338460] env[69994]: DEBUG oslo_vmware.api [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925781, 'name': PowerOnVM_Task, 'duration_secs': 0.560093} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.339785] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.534s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.340481] env[69994]: INFO nova.compute.claims [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 957.343054] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 957.343463] env[69994]: DEBUG nova.compute.manager [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 957.344151] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d84e9b6-b935-42b6-8a63-67b102be42e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.364111] env[69994]: DEBUG nova.network.neutron [req-6d4ede1c-6642-404d-aaf1-7b2d244f4fba req-b80ceb12-bd41-4846-ba6f-ea2d611ad39a service nova] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Updated VIF entry in instance network info cache for port 5e15c09f-a8a4-4350-aaa1-705508f4deb7. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 957.364564] env[69994]: DEBUG nova.network.neutron [req-6d4ede1c-6642-404d-aaf1-7b2d244f4fba req-b80ceb12-bd41-4846-ba6f-ea2d611ad39a service nova] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Updating instance_info_cache with network_info: [{"id": "5e15c09f-a8a4-4350-aaa1-705508f4deb7", "address": "fa:16:3e:12:77:5c", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e15c09f-a8", "ovs_interfaceid": "5e15c09f-a8a4-4350-aaa1-705508f4deb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.367029] env[69994]: INFO nova.scheduler.client.report [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Deleted allocations for instance 43119e21-5226-482c-b640-33e73051a563 [ 957.387751] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925784, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.433895] env[69994]: DEBUG nova.compute.manager [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 957.457033] env[69994]: DEBUG nova.virt.hardware [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 957.457317] env[69994]: DEBUG nova.virt.hardware [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.457478] env[69994]: DEBUG nova.virt.hardware [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 957.457661] env[69994]: DEBUG nova.virt.hardware [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.457813] env[69994]: DEBUG nova.virt.hardware [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 957.457967] env[69994]: DEBUG nova.virt.hardware [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 957.458197] env[69994]: DEBUG nova.virt.hardware [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 957.458360] env[69994]: DEBUG nova.virt.hardware [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 957.458539] env[69994]: DEBUG nova.virt.hardware [None 
req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 957.458736] env[69994]: DEBUG nova.virt.hardware [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 957.458927] env[69994]: DEBUG nova.virt.hardware [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 957.459825] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a9d34e-cc22-4e37-b32e-84c2853fb2c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.468280] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997554e2-4f80-41e2-971d-da9aad2970fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.518802] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Acquiring lock "1e19dc4d-c3dd-41e7-819f-30d54cb1390e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.519191] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Lock "1e19dc4d-c3dd-41e7-819f-30d54cb1390e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.519443] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Acquiring lock "1e19dc4d-c3dd-41e7-819f-30d54cb1390e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.519674] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Lock "1e19dc4d-c3dd-41e7-819f-30d54cb1390e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.519884] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Lock 
"1e19dc4d-c3dd-41e7-819f-30d54cb1390e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.525926] env[69994]: INFO nova.compute.manager [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Terminating instance [ 957.576089] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52967824-52d1-b293-592e-b8836d6ee049, 'name': SearchDatastore_Task, 'duration_secs': 0.021976} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.576089] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.576089] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 957.576089] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.576414] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.576449] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 957.576735] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8ec5900-b238-412a-8be3-4a607eae0153 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.585678] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
957.585900] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 957.586813] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcfd3fd3-3a45-40be-82de-3447181d0f52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.593851] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 957.593851] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c886c7-3c65-864b-dc4e-d1b1eefff6ae" [ 957.593851] env[69994]: _type = "Task" [ 957.593851] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.602895] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c886c7-3c65-864b-dc4e-d1b1eefff6ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.869832] env[69994]: DEBUG oslo_concurrency.lockutils [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.869919] env[69994]: DEBUG oslo_concurrency.lockutils [req-6d4ede1c-6642-404d-aaf1-7b2d244f4fba req-b80ceb12-bd41-4846-ba6f-ea2d611ad39a service nova] Releasing lock "refresh_cache-f2ae08e9-fbf3-49ab-8290-75f8a53d6030" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.874722] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a643c5f1-b09c-4925-87b4-0ed19246d163 tempest-ServerRescueTestJSONUnderV235-568447660 tempest-ServerRescueTestJSONUnderV235-568447660-project-member] Lock "43119e21-5226-482c-b640-33e73051a563" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.665s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.892195] env[69994]: DEBUG oslo_vmware.api [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925784, 'name': PowerOnVM_Task, 'duration_secs': 0.78212} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.893049] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 957.893244] env[69994]: INFO nova.compute.manager [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Took 8.29 seconds to spawn the instance on the hypervisor. [ 957.893429] env[69994]: DEBUG nova.compute.manager [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 957.894234] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c57f250-bcfd-4394-b46a-5a261ee0d0da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.030105] env[69994]: DEBUG nova.compute.manager [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 958.030305] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 958.031245] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f25e90d-c3ab-4148-81a0-989e4dadffc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.038878] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 958.039134] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80d43b1b-dc06-4fea-9ada-c9eb2baa73ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.045111] env[69994]: DEBUG oslo_vmware.api [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Waiting for the task: (returnval){ [ 958.045111] env[69994]: value = "task-2925785" [ 958.045111] env[69994]: _type = "Task" [ 958.045111] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.052609] env[69994]: DEBUG oslo_vmware.api [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925785, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.106296] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c886c7-3c65-864b-dc4e-d1b1eefff6ae, 'name': SearchDatastore_Task, 'duration_secs': 0.016065} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.107140] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32cf2bd1-3bdb-44ec-b183-fa0ff8d6e510 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.112830] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 958.112830] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524e7d7c-fc20-475f-d8d9-383581af9a61" [ 958.112830] env[69994]: _type = "Task" [ 958.112830] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.121181] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "234c2683-80f3-4f29-bcc9-9853338128bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.121458] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "234c2683-80f3-4f29-bcc9-9853338128bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.121673] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "234c2683-80f3-4f29-bcc9-9853338128bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.121878] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "234c2683-80f3-4f29-bcc9-9853338128bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.122055] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "234c2683-80f3-4f29-bcc9-9853338128bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.123600] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524e7d7c-fc20-475f-d8d9-383581af9a61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.124201] env[69994]: INFO nova.compute.manager [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Terminating instance [ 958.238350] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquiring lock "15595947-b944-4c82-90ae-883ed951c909" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.238643] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Lock "15595947-b944-4c82-90ae-883ed951c909" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.239029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquiring lock "15595947-b944-4c82-90ae-883ed951c909-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.239220] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Lock "15595947-b944-4c82-90ae-883ed951c909-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.239391] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Lock "15595947-b944-4c82-90ae-883ed951c909-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
958.241617] env[69994]: INFO nova.compute.manager [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Terminating instance [ 958.419077] env[69994]: INFO nova.compute.manager [None req-be9c3463-3855-4de1-9e80-55711bbd65cb tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Took 25.37 seconds to build instance. [ 958.428590] env[69994]: DEBUG nova.network.neutron [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Successfully updated port: 2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 958.555867] env[69994]: DEBUG oslo_vmware.api [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925785, 'name': PowerOffVM_Task, 'duration_secs': 0.319639} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.556229] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 958.556350] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 958.556606] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62e1a091-f394-4949-aa24-916faa752af9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.628614] env[69994]: DEBUG nova.compute.manager [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 958.629654] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 958.629769] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524e7d7c-fc20-475f-d8d9-383581af9a61, 'name': SearchDatastore_Task, 'duration_secs': 0.012458} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.635430] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bca0a4-1dd9-4387-b71e-db18ef0c7a81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.638753] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.639084] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] f2ae08e9-fbf3-49ab-8290-75f8a53d6030/f2ae08e9-fbf3-49ab-8290-75f8a53d6030.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 958.639388] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 958.639569] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 958.639744] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Deleting the datastore file [datastore2] 1e19dc4d-c3dd-41e7-819f-30d54cb1390e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 958.641255] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7337a5c-308f-459e-99e7-d53367ad4d06 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.642531] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25038fec-61be-4db5-bc10-7515b19ec399 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.648597] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 958.650717] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53bdfc77-8f39-4d6d-a5d7-a5e9ffe56108 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.652338] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 958.652338] env[69994]: value = "task-2925787" [ 958.652338] env[69994]: _type = "Task" [ 958.652338] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.652604] env[69994]: DEBUG oslo_vmware.api [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Waiting for the task: (returnval){ [ 958.652604] env[69994]: value = "task-2925788" [ 958.652604] env[69994]: _type = "Task" [ 958.652604] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.664144] env[69994]: DEBUG oslo_vmware.api [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 958.664144] env[69994]: value = "task-2925789" [ 958.664144] env[69994]: _type = "Task" [ 958.664144] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.678918] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925787, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.679229] env[69994]: DEBUG oslo_vmware.api [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925788, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.684462] env[69994]: DEBUG oslo_vmware.api [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925789, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.746397] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquiring lock "refresh_cache-15595947-b944-4c82-90ae-883ed951c909" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.746397] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquired lock "refresh_cache-15595947-b944-4c82-90ae-883ed951c909" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.746585] env[69994]: DEBUG nova.network.neutron [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.754750] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d770321-d07b-4a91-8168-2ea0fe673fc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.762486] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc89c2a-e2e1-4e1d-8f2e-f0661724a72a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.799095] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c5acff-d2e1-4e27-a88c-f68cf5945831 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.805211] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e043315-26e1-450a-9d37-c6af6c1258a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.822335] env[69994]: DEBUG nova.compute.provider_tree [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.833561] env[69994]: DEBUG nova.compute.manager [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Received event network-vif-plugged-2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 958.833907] env[69994]: DEBUG oslo_concurrency.lockutils [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] Acquiring lock "e03bc64f-70e9-4097-a1e1-ebf8f86508ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.834022] env[69994]: DEBUG oslo_concurrency.lockutils [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 
req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] Lock "e03bc64f-70e9-4097-a1e1-ebf8f86508ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.834209] env[69994]: DEBUG oslo_concurrency.lockutils [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] Lock "e03bc64f-70e9-4097-a1e1-ebf8f86508ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.834363] env[69994]: DEBUG nova.compute.manager [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] No waiting events found dispatching network-vif-plugged-2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 958.834491] env[69994]: WARNING nova.compute.manager [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Received unexpected event network-vif-plugged-2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd for instance with vm_state building and task_state spawning. [ 958.834688] env[69994]: DEBUG nova.compute.manager [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Received event network-changed-2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 958.834818] env[69994]: DEBUG nova.compute.manager [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Refreshing instance network info cache due to event network-changed-2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 958.835034] env[69994]: DEBUG oslo_concurrency.lockutils [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] Acquiring lock "refresh_cache-e03bc64f-70e9-4097-a1e1-ebf8f86508ed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.835235] env[69994]: DEBUG oslo_concurrency.lockutils [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] Acquired lock "refresh_cache-e03bc64f-70e9-4097-a1e1-ebf8f86508ed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.835420] env[69994]: DEBUG nova.network.neutron [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Refreshing network info cache for port 2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 958.921074] env[69994]: DEBUG oslo_concurrency.lockutils [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "83cef95b-99a5-4e6e-8258-79b380b595b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.921162] env[69994]: DEBUG oslo_concurrency.lockutils [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "83cef95b-99a5-4e6e-8258-79b380b595b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.921613] env[69994]: DEBUG oslo_concurrency.lockutils [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "83cef95b-99a5-4e6e-8258-79b380b595b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.921859] env[69994]: DEBUG oslo_concurrency.lockutils [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "83cef95b-99a5-4e6e-8258-79b380b595b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.922055] env[69994]: DEBUG oslo_concurrency.lockutils [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "83cef95b-99a5-4e6e-8258-79b380b595b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.926025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be9c3463-3855-4de1-9e80-55711bbd65cb 
tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "ee68a538-d803-4bd6-9117-b021b28da899" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.883s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.927494] env[69994]: INFO nova.compute.manager [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Terminating instance [ 958.931244] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "refresh_cache-e03bc64f-70e9-4097-a1e1-ebf8f86508ed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.175141] env[69994]: DEBUG oslo_vmware.api [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Task: {'id': task-2925788, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213871} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.175459] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925787, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513334} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.176086] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 959.176299] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 959.176561] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 959.176787] env[69994]: INFO nova.compute.manager [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 959.177040] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 959.177288] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] f2ae08e9-fbf3-49ab-8290-75f8a53d6030/f2ae08e9-fbf3-49ab-8290-75f8a53d6030.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 959.177512] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 959.180604] env[69994]: DEBUG nova.compute.manager [-] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 959.180715] env[69994]: DEBUG nova.network.neutron [-] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 959.182409] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a4d9241d-230e-4ba2-a737-68cf6d5ce6d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.184240] env[69994]: DEBUG oslo_vmware.api [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925789, 'name': PowerOffVM_Task, 'duration_secs': 0.284397} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.184490] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 959.184887] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 959.185491] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cdae80b9-fa61-437d-aba5-05a6a36e62b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.190938] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 959.190938] env[69994]: value = "task-2925790" [ 959.190938] env[69994]: _type = "Task" [ 959.190938] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.197632] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925790, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.293403] env[69994]: DEBUG nova.network.neutron [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 959.324186] env[69994]: DEBUG nova.scheduler.client.report [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 959.350129] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 959.350544] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 959.351082] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Deleting the datastore file [datastore2] 234c2683-80f3-4f29-bcc9-9853338128bd {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 959.351631] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7a399ee-e6e6-4cc4-9ec6-1c753ebd5732 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.360837] env[69994]: DEBUG oslo_vmware.api [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for the task: (returnval){ [ 959.360837] env[69994]: value = "task-2925792" [ 959.360837] env[69994]: _type = "Task" [ 959.360837] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.377549] env[69994]: DEBUG oslo_vmware.api [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925792, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.390384] env[69994]: DEBUG nova.network.neutron [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 959.433242] env[69994]: DEBUG nova.compute.manager [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 959.433597] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 959.434727] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80994a6b-f524-4703-bc10-ad707937a89a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.442996] env[69994]: DEBUG nova.network.neutron [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.452018] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 959.452399] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f62c011-9ff2-4454-9e32-cf9910a22505 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.463020] env[69994]: DEBUG oslo_vmware.api [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 959.463020] env[69994]: value = "task-2925793" [ 959.463020] env[69994]: _type = "Task" [ 959.463020] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.479444] env[69994]: DEBUG oslo_vmware.api [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925793, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.504918] env[69994]: DEBUG nova.network.neutron [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.618600] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "d1875a97-9eba-47be-a76d-6088cb13412b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.623693] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "d1875a97-9eba-47be-a76d-6088cb13412b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.704181] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925790, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081362} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.704181] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.704181] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437478d7-befe-45f8-b807-90834a6a4aea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.731383] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] f2ae08e9-fbf3-49ab-8290-75f8a53d6030/f2ae08e9-fbf3-49ab-8290-75f8a53d6030.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.736028] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c71ab16-8ff7-44e2-ab32-717b7e31e222 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.757279] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 959.757279] env[69994]: value = "task-2925794" [ 959.757279] env[69994]: _type = "Task" [ 959.757279] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.766963] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925794, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.829822] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.491s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.830261] env[69994]: DEBUG nova.compute.manager [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 959.833058] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.002s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.833297] env[69994]: DEBUG nova.objects.instance [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lazy-loading 'resources' on Instance uuid 93087ec4-1d88-47cc-b1d2-0f1697556eae {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.874890] env[69994]: DEBUG oslo_vmware.api [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Task: {'id': task-2925792, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.278248} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.874890] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 959.874890] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 959.874890] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 959.874890] env[69994]: INFO nova.compute.manager [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Took 1.25 seconds to destroy the instance on the hypervisor. [ 959.875394] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 959.875394] env[69994]: DEBUG nova.compute.manager [-] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 959.875394] env[69994]: DEBUG nova.network.neutron [-] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 959.948427] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Releasing lock "refresh_cache-15595947-b944-4c82-90ae-883ed951c909" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.948932] env[69994]: DEBUG nova.compute.manager [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 959.949174] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 959.950257] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d5aadd-1af7-48d0-9b86-4d2bfb513708 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.959311] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 959.959311] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d76c49d4-97f7-4ad9-a8be-c2f3a263e697 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.964722] env[69994]: DEBUG oslo_vmware.api [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 959.964722] env[69994]: value = "task-2925795" [ 959.964722] env[69994]: _type = "Task" [ 959.964722] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.977717] env[69994]: DEBUG oslo_vmware.api [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925793, 'name': PowerOffVM_Task, 'duration_secs': 0.22662} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.980316] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 959.980533] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 959.980793] env[69994]: DEBUG oslo_vmware.api [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925795, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.981045] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dfe0f601-9289-45e5-b431-defbc667f93e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.010996] env[69994]: DEBUG oslo_concurrency.lockutils [req-2cd36c3d-6e3d-42bc-87aa-d492d55fb691 req-c3f4a1ad-a5ac-4036-aa04-90975d907af3 service nova] Releasing lock "refresh_cache-e03bc64f-70e9-4097-a1e1-ebf8f86508ed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.011554] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "refresh_cache-e03bc64f-70e9-4097-a1e1-ebf8f86508ed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.011636] env[69994]: DEBUG nova.network.neutron [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 960.069378] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 960.069616] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 960.069805] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleting the datastore file [datastore2] 83cef95b-99a5-4e6e-8258-79b380b595b3 
{{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 960.070262] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9250b0a-a748-471a-a3af-35bcac1988f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.078042] env[69994]: DEBUG oslo_vmware.api [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 960.078042] env[69994]: value = "task-2925797" [ 960.078042] env[69994]: _type = "Task" [ 960.078042] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.086428] env[69994]: DEBUG oslo_vmware.api [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925797, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.131025] env[69994]: DEBUG nova.compute.manager [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 960.203879] env[69994]: DEBUG nova.network.neutron [-] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.270143] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925794, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.318115] env[69994]: DEBUG nova.compute.manager [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 960.320208] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff031eca-de12-4bba-92c0-a9f597568455 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.335986] env[69994]: DEBUG nova.compute.utils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 960.337523] env[69994]: DEBUG nova.compute.manager [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 960.337730] env[69994]: DEBUG nova.network.neutron [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 960.388942] env[69994]: DEBUG nova.policy [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d3c84f8c5f04ca0b021ffc8d9f6ab7c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef2aa3bc994a479e838e89fa7058ad64', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 960.486392] env[69994]: DEBUG oslo_vmware.api [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925795, 'name': PowerOffVM_Task, 'duration_secs': 0.223789} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.487223] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 960.487639] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 960.488132] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9e7d654-35f3-48cc-ac11-6fa020415b6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.514353] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 960.514353] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 960.514353] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Deleting the datastore file [datastore2] 
15595947-b944-4c82-90ae-883ed951c909 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 960.516678] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab5cf9ca-b92f-4e51-88f0-3ac6502a162b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.522517] env[69994]: DEBUG oslo_vmware.api [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for the task: (returnval){ [ 960.522517] env[69994]: value = "task-2925799" [ 960.522517] env[69994]: _type = "Task" [ 960.522517] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.530398] env[69994]: DEBUG oslo_vmware.api [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925799, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.570127] env[69994]: DEBUG nova.network.neutron [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 960.592826] env[69994]: DEBUG oslo_vmware.api [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925797, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.330345} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.593712] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 960.593712] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 960.593712] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 960.593712] env[69994]: INFO nova.compute.manager [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 960.593888] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 960.594094] env[69994]: DEBUG nova.compute.manager [-] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 960.594188] env[69994]: DEBUG nova.network.neutron [-] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 960.651906] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.711382] env[69994]: DEBUG nova.network.neutron [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Updating instance_info_cache with network_info: [{"id": "2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd", "address": "fa:16:3e:a7:d5:bd", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fdbcd8a-f3", "ovs_interfaceid": "2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.712723] env[69994]: INFO nova.compute.manager [-] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Took 1.53 seconds to deallocate network for instance. 
[ 960.727024] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-116a1f48-ec16-4685-b890-c2eb50206e13 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.732488] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df6974a-98fc-477c-8a77-694749c71607 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.776418] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c6a3fd-63c1-47f9-8311-156eb82ad35a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.787616] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925794, 'name': ReconfigVM_Task, 'duration_secs': 0.746872} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.790206] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Reconfigured VM instance instance-00000049 to attach disk [datastore1] f2ae08e9-fbf3-49ab-8290-75f8a53d6030/f2ae08e9-fbf3-49ab-8290-75f8a53d6030.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.790938] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac0c21f7-a2e8-4726-8d3c-a556b9f945d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.795147] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d56111-0978-4c12-b389-2ff5681cd7ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.812505] env[69994]: DEBUG nova.compute.provider_tree [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 960.815130] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 960.815130] env[69994]: value = "task-2925800" [ 960.815130] env[69994]: _type = "Task" [ 960.815130] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.823806] env[69994]: DEBUG nova.network.neutron [-] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.825241] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925800, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.832492] env[69994]: INFO nova.compute.manager [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] instance snapshotting [ 960.839755] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f98245-6c99-4095-98bb-d2ec06b70f5c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.843774] env[69994]: DEBUG nova.compute.manager [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 960.866938] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7d5e38-034a-46c1-80f7-94f3a273dd7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.934466] env[69994]: DEBUG nova.network.neutron [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Successfully created port: 3d8fb179-d40f-4e18-8089-07f61c108080 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 960.947653] env[69994]: DEBUG nova.compute.manager [req-7b235843-9380-461b-bc64-1251b39cdc8b req-97504918-22bd-4dad-be5d-eced9fa5ee03 service nova] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Received event network-vif-deleted-29b654a3-e8bd-4fa0-a914-d1f1ebb404f7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 960.947915] env[69994]: DEBUG nova.compute.manager [req-7b235843-9380-461b-bc64-1251b39cdc8b req-97504918-22bd-4dad-be5d-eced9fa5ee03 service nova] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Received event network-vif-deleted-a9985d6b-dfb2-4569-99f9-a42c283e7cd1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 961.033225] env[69994]: DEBUG oslo_vmware.api [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Task: {'id': task-2925799, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.275454} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.033513] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.033765] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.034016] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.034813] env[69994]: INFO nova.compute.manager [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Took 1.09 seconds to destroy the instance on the hypervisor. [ 961.035175] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 961.035411] env[69994]: DEBUG nova.compute.manager [-] [instance: 15595947-b944-4c82-90ae-883ed951c909] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 961.035826] env[69994]: DEBUG nova.network.neutron [-] [instance: 15595947-b944-4c82-90ae-883ed951c909] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 961.055741] env[69994]: DEBUG nova.network.neutron [-] [instance: 15595947-b944-4c82-90ae-883ed951c909] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 961.219944] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "refresh_cache-e03bc64f-70e9-4097-a1e1-ebf8f86508ed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.220350] env[69994]: DEBUG nova.compute.manager [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Instance network_info: |[{"id": "2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd", "address": "fa:16:3e:a7:d5:bd", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2fdbcd8a-f3", "ovs_interfaceid": "2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 961.220737] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:d5:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 961.228379] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Creating folder: Project (c0f5bb040f474df19739d5170639ff67). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 961.229482] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.230061] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47ba2a68-a60f-4a9b-b8f7-be56a1325557 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.240759] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Created folder: Project (c0f5bb040f474df19739d5170639ff67) in parent group-v587342. [ 961.240946] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Creating folder: Instances. Parent ref: group-v587543. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 961.241196] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0dd51d48-4e29-44b4-859c-5760e89ced9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.249507] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Created folder: Instances in parent group-v587543. [ 961.249727] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 961.249911] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 961.250133] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56a52d0c-2440-4f9b-b35c-60207be599a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.269290] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 961.269290] env[69994]: value = "task-2925803" [ 961.269290] env[69994]: _type = "Task" [ 961.269290] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.276964] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925803, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.327834] env[69994]: INFO nova.compute.manager [-] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Took 1.45 seconds to deallocate network for instance. 
[ 961.328205] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925800, 'name': Rename_Task, 'duration_secs': 0.179498} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.329818] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 961.332515] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9994ab91-db19-4bcf-932e-64ebdfbf2fe7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.338114] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 961.338114] env[69994]: value = "task-2925804" [ 961.338114] env[69994]: _type = "Task" [ 961.338114] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.341820] env[69994]: ERROR nova.scheduler.client.report [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [req-847cbf25-1956-416f-a3b0-895754cb9d47] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-847cbf25-1956-416f-a3b0-895754cb9d47"}]} [ 961.351828] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925804, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.358544] env[69994]: DEBUG nova.scheduler.client.report [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 961.373284] env[69994]: DEBUG nova.scheduler.client.report [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 961.373532] env[69994]: DEBUG nova.compute.provider_tree [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 961.381173] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 961.381536] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d795f165-20c0-45fe-834c-0f709f304f31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.385575] env[69994]: DEBUG nova.scheduler.client.report [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 961.388936] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 961.388936] env[69994]: value = "task-2925805" [ 961.388936] env[69994]: _type = "Task" [ 961.388936] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.398121] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925805, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.404522] env[69994]: DEBUG nova.scheduler.client.report [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 961.557915] env[69994]: DEBUG nova.network.neutron [-] [instance: 15595947-b944-4c82-90ae-883ed951c909] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.590334] env[69994]: DEBUG nova.network.neutron [-] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.701083] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfcec95-3261-41c0-8d00-12031dddd461 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.709226] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2291d27e-b319-4cb6-a746-8d99acd9afcf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.742448] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f464c0-434f-465a-afbb-3774e80215b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.750042] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac377f39-585e-4aa0-882e-86f3c8ede3c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.763224] env[69994]: DEBUG nova.compute.provider_tree [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 961.778617] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925803, 'name': CreateVM_Task, 'duration_secs': 0.374759} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.778781] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 961.780062] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.780062] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.780062] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 961.780245] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e478f97-5177-44e9-a8c7-a2ce39003e97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.784480] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 961.784480] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f8647f-8b45-5cb8-b138-845967efad76" [ 961.784480] env[69994]: _type = "Task" [ 961.784480] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.792232] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f8647f-8b45-5cb8-b138-845967efad76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.837282] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.847681] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925804, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.853968] env[69994]: DEBUG nova.compute.manager [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 961.880577] env[69994]: DEBUG nova.virt.hardware [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=<?>,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-11T12:28:31Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 961.880866] env[69994]: DEBUG nova.virt.hardware [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 961.881043] env[69994]: DEBUG nova.virt.hardware [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 961.881249] env[69994]: DEBUG nova.virt.hardware [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 961.881410] env[69994]: DEBUG nova.virt.hardware [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 961.881595] env[69994]: DEBUG nova.virt.hardware [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 961.881836] env[69994]: DEBUG nova.virt.hardware [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 961.882011] env[69994]: DEBUG nova.virt.hardware [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 961.882197] env[69994]: DEBUG nova.virt.hardware [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 961.882362] env[69994]: DEBUG nova.virt.hardware [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 961.882536] env[69994]: DEBUG nova.virt.hardware [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 961.883412] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f796c2a4-2cc2-49ae-ae8a-e7f5ed5b4564 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.894173] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93dfa0c-5135-4be2-81e4-38ec99578dad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.903063] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925805, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.060895] env[69994]: INFO nova.compute.manager [-] [instance: 15595947-b944-4c82-90ae-883ed951c909] Took 1.03 seconds to deallocate network for instance. [ 962.095181] env[69994]: INFO nova.compute.manager [-] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Took 1.50 seconds to deallocate network for instance. [ 962.295378] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f8647f-8b45-5cb8-b138-845967efad76, 'name': SearchDatastore_Task, 'duration_secs': 0.047727} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.295900] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.296286] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 962.296286] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.296465] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.296713] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 962.296929] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fecc418b-74a7-4839-80f1-be7403375b68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.299722] env[69994]: DEBUG nova.scheduler.client.report [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 105 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 962.299722] env[69994]: DEBUG nova.compute.provider_tree [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 105 to 106 during operation: update_inventory {{(pid=69994) _update_generation 
/opt/stack/nova/nova/compute/provider_tree.py:164}} [ 962.299867] env[69994]: DEBUG nova.compute.provider_tree [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 962.310432] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 962.310432] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 962.310432] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3076a81-ee74-42f9-a6aa-aacee022cecb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.315861] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 962.315861] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524ca508-6a16-8bea-f632-5fca609ac250" [ 962.315861] env[69994]: _type = "Task" [ 962.315861] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.325598] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524ca508-6a16-8bea-f632-5fca609ac250, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.347621] env[69994]: DEBUG oslo_vmware.api [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925804, 'name': PowerOnVM_Task, 'duration_secs': 0.769752} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.347914] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.348163] env[69994]: INFO nova.compute.manager [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Took 8.51 seconds to spawn the instance on the hypervisor. [ 962.348346] env[69994]: DEBUG nova.compute.manager [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 962.349127] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8671a01-4659-4f26-83fc-2fafb38a0429 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.400589] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925805, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.492152] env[69994]: DEBUG nova.network.neutron [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Successfully updated port: 3d8fb179-d40f-4e18-8089-07f61c108080 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 962.567685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.603122] env[69994]: DEBUG oslo_concurrency.lockutils [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.804865] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.972s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.808938] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.616s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.809943] env[69994]: INFO nova.compute.claims [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 962.825461] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524ca508-6a16-8bea-f632-5fca609ac250, 'name': SearchDatastore_Task, 'duration_secs': 0.013055} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.826306] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28ebcb8e-1ae5-49c6-aa68-197e04871695 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.830068] env[69994]: INFO nova.scheduler.client.report [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Deleted allocations for instance 93087ec4-1d88-47cc-b1d2-0f1697556eae [ 962.835359] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 962.835359] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52539241-e3e3-b424-86c5-a8159aecb9bc" [ 962.835359] env[69994]: _type = "Task" [ 962.835359] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.844020] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52539241-e3e3-b424-86c5-a8159aecb9bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.864762] env[69994]: INFO nova.compute.manager [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Took 23.88 seconds to build instance. [ 962.899948] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925805, 'name': CreateSnapshot_Task, 'duration_secs': 1.216196} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.900271] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 962.901014] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f860fd95-cf10-4a49-bea2-e8a93049763f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.981664] env[69994]: DEBUG nova.compute.manager [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Received event network-vif-deleted-2bda4ed5-72d2-44ec-bf8e-43efc8fab0db {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 962.981825] env[69994]: DEBUG nova.compute.manager [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Received event network-vif-plugged-3d8fb179-d40f-4e18-8089-07f61c108080 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 962.982063] env[69994]: DEBUG oslo_concurrency.lockutils [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] Acquiring lock "e1c00159-d198-4858-b5a3-aa05152b1fda-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.982280] env[69994]: DEBUG oslo_concurrency.lockutils [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] Lock "e1c00159-d198-4858-b5a3-aa05152b1fda-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.982447] env[69994]: DEBUG oslo_concurrency.lockutils [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] Lock "e1c00159-d198-4858-b5a3-aa05152b1fda-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.982610] env[69994]: DEBUG nova.compute.manager [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] No waiting events found dispatching network-vif-plugged-3d8fb179-d40f-4e18-8089-07f61c108080 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 962.982773] env[69994]: WARNING nova.compute.manager [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Received unexpected event network-vif-plugged-3d8fb179-d40f-4e18-8089-07f61c108080 for instance with vm_state building and task_state spawning. 
[ 962.982930] env[69994]: DEBUG nova.compute.manager [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Received event network-changed-3d8fb179-d40f-4e18-8089-07f61c108080 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 962.983144] env[69994]: DEBUG nova.compute.manager [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Refreshing instance network info cache due to event network-changed-3d8fb179-d40f-4e18-8089-07f61c108080. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 962.983343] env[69994]: DEBUG oslo_concurrency.lockutils [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] Acquiring lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.983478] env[69994]: DEBUG oslo_concurrency.lockutils [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] Acquired lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.983630] env[69994]: DEBUG nova.network.neutron [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Refreshing network info cache for port 3d8fb179-d40f-4e18-8089-07f61c108080 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 962.994878] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquiring lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.344052] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a6bc10b-6e51-4968-95c4-4008602bca0d tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "93087ec4-1d88-47cc-b1d2-0f1697556eae" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 23.976s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.351210] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52539241-e3e3-b424-86c5-a8159aecb9bc, 'name': SearchDatastore_Task, 'duration_secs': 0.013243} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.351471] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.351729] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e03bc64f-70e9-4097-a1e1-ebf8f86508ed/e03bc64f-70e9-4097-a1e1-ebf8f86508ed.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 963.351989] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1f865e7-41ef-4cae-9ed5-caf68b92fa1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.357914] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 963.357914] env[69994]: value = "task-2925806" [ 963.357914] env[69994]: _type = "Task" [ 963.357914] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.366743] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6ec9dbe-75ab-4df5-8951-9a5c6dd31e07 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f2ae08e9-fbf3-49ab-8290-75f8a53d6030" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 25.393s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.368028] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925806, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.418757] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 963.419104] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9e76cec3-7a92-4db2-8812-c7265e36496b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.427232] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 963.427232] env[69994]: value = "task-2925807" [ 963.427232] env[69994]: _type = "Task" [ 963.427232] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.435916] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925807, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.538603] env[69994]: DEBUG nova.network.neutron [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 963.637159] env[69994]: DEBUG nova.network.neutron [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.797972] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "f2ae08e9-fbf3-49ab-8290-75f8a53d6030" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.798312] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f2ae08e9-fbf3-49ab-8290-75f8a53d6030" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.798571] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "f2ae08e9-fbf3-49ab-8290-75f8a53d6030-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.798822] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f2ae08e9-fbf3-49ab-8290-75f8a53d6030-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.799073] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f2ae08e9-fbf3-49ab-8290-75f8a53d6030-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.801332] env[69994]: INFO nova.compute.manager [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Terminating instance [ 963.868107] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925806, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457621} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.870836] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e03bc64f-70e9-4097-a1e1-ebf8f86508ed/e03bc64f-70e9-4097-a1e1-ebf8f86508ed.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 963.871138] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 963.871611] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1a4addb9-dc10-4f87-bab9-72411c323f6d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.878216] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 963.878216] env[69994]: value = "task-2925808" [ 963.878216] env[69994]: _type = "Task" [ 963.878216] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.890108] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925808, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.937420] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925807, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.116206] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdd2fa0-227c-4252-b245-787cd34e88cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.123683] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117324c9-2b7a-4d34-a507-9f8e9a7f35c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.158368] env[69994]: DEBUG oslo_concurrency.lockutils [req-18c5967a-9625-45d4-9bf0-b64ad79f5fb4 req-d8875d2e-63d8-4875-ae25-10b845e5a53d service nova] Releasing lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.159296] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquired lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.159454] env[69994]: DEBUG nova.network.neutron [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 964.161465] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a56f8b-1ab2-46a3-a428-a338f4831da3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.170256] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53ec567-5299-40a5-a748-f2f5b774afa2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.184107] env[69994]: DEBUG nova.compute.provider_tree [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.306655] env[69994]: DEBUG nova.compute.manager [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 964.306956] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 964.307968] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a8ab53-76e5-42fb-976e-22aa92f7cfb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.316212] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 964.316454] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5bee03cb-4f14-4c77-a53e-824556f3a04e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.322861] env[69994]: DEBUG oslo_vmware.api [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 964.322861] env[69994]: value = "task-2925809" [ 964.322861] env[69994]: _type = "Task" [ 964.322861] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.331273] env[69994]: DEBUG oslo_vmware.api [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925809, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.387422] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925808, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106209} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.387721] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 964.388505] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2cea31-a4c8-41f7-9768-b4e5c15a94d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.410670] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] e03bc64f-70e9-4097-a1e1-ebf8f86508ed/e03bc64f-70e9-4097-a1e1-ebf8f86508ed.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 964.410993] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22a6493c-0190-4a4c-816c-af87fdc25fc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.433187] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 964.433187] env[69994]: value = "task-2925810" [ 964.433187] env[69994]: _type = "Task" [ 964.433187] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.440058] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925807, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.444305] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925810, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.501890] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "565066c4-2f33-44c6-8e82-4c6d729cd0b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.502271] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "565066c4-2f33-44c6-8e82-4c6d729cd0b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.502584] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "565066c4-2f33-44c6-8e82-4c6d729cd0b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.502780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "565066c4-2f33-44c6-8e82-4c6d729cd0b7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.503509] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "565066c4-2f33-44c6-8e82-4c6d729cd0b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.505679] env[69994]: INFO nova.compute.manager [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Terminating instance [ 964.686627] env[69994]: DEBUG nova.scheduler.client.report [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.692461] env[69994]: DEBUG nova.network.neutron [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 
tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 964.834609] env[69994]: DEBUG oslo_vmware.api [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925809, 'name': PowerOffVM_Task, 'duration_secs': 0.188711} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.834894] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 964.835080] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 964.835333] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fffede8c-ba51-44dd-bea5-1996ae9a8490 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.858845] env[69994]: DEBUG nova.network.neutron [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Updating instance_info_cache with network_info: [{"id": "3d8fb179-d40f-4e18-8089-07f61c108080", "address": "fa:16:3e:a9:49:3e", "network": {"id": "132b6a5f-2a69-4b91-a418-959f72df76fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1585728291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef2aa3bc994a479e838e89fa7058ad64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d8fb179-d4", "ovs_interfaceid": "3d8fb179-d40f-4e18-8089-07f61c108080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.897487] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Unregistered the VM {{(pid=69994) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 964.897729] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 964.897916] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleting the datastore file [datastore1] f2ae08e9-fbf3-49ab-8290-75f8a53d6030 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 964.898199] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71327ea1-bed4-4095-b496-8797556198b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.904676] env[69994]: DEBUG oslo_vmware.api [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 964.904676] env[69994]: value = "task-2925812" [ 964.904676] env[69994]: _type = "Task" [ 964.904676] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.912705] env[69994]: DEBUG oslo_vmware.api [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925812, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.941864] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925807, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.947591] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925810, 'name': ReconfigVM_Task, 'duration_secs': 0.286805} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.947747] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Reconfigured VM instance instance-0000004a to attach disk [datastore1] e03bc64f-70e9-4097-a1e1-ebf8f86508ed/e03bc64f-70e9-4097-a1e1-ebf8f86508ed.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 964.948500] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-741048be-0f22-48d7-8ec4-6c0cb0c4cced {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.953901] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 964.953901] env[69994]: value = "task-2925813" [ 964.953901] env[69994]: _type = "Task" [ 964.953901] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.961452] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925813, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.011549] env[69994]: DEBUG nova.compute.manager [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 965.011801] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.012876] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9f4b1c-db74-49ec-81fa-2a520706cc82 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.020790] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.021073] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6a18448-0f26-4ecc-b8d8-bd985b48a754 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.026622] env[69994]: DEBUG oslo_vmware.api [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 965.026622] env[69994]: value = "task-2925814" [ 965.026622] env[69994]: _type = "Task" [ 965.026622] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.035016] env[69994]: DEBUG oslo_vmware.api [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925814, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.191717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.384s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.192382] env[69994]: DEBUG nova.compute.manager [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 965.195229] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.790s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.196687] env[69994]: INFO nova.compute.claims [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 965.207527] env[69994]: DEBUG oslo_concurrency.lockutils [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Acquiring lock "f1f0d79f-dc67-4cf9-816c-c451f20d65ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.207926] env[69994]: DEBUG oslo_concurrency.lockutils [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Lock "f1f0d79f-dc67-4cf9-816c-c451f20d65ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.208159] env[69994]: DEBUG oslo_concurrency.lockutils [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Acquiring lock "f1f0d79f-dc67-4cf9-816c-c451f20d65ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.208340] env[69994]: DEBUG oslo_concurrency.lockutils [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Lock "f1f0d79f-dc67-4cf9-816c-c451f20d65ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.208509] env[69994]: DEBUG oslo_concurrency.lockutils [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Lock "f1f0d79f-dc67-4cf9-816c-c451f20d65ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.210736] env[69994]: INFO nova.compute.manager [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Terminating instance [ 965.361370] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Releasing lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.361697] env[69994]: DEBUG nova.compute.manager [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Instance network_info: |[{"id": "3d8fb179-d40f-4e18-8089-07f61c108080", "address": "fa:16:3e:a9:49:3e", "network": {"id": "132b6a5f-2a69-4b91-a418-959f72df76fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1585728291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef2aa3bc994a479e838e89fa7058ad64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d8fb179-d4", "ovs_interfaceid": "3d8fb179-d40f-4e18-8089-07f61c108080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 965.362145] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:49:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c842425c-544e-4ce2-9657-512723bd318e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d8fb179-d40f-4e18-8089-07f61c108080', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.370245] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Creating folder: Project (ef2aa3bc994a479e838e89fa7058ad64). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 965.371150] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d0a03ff-bcbd-49a6-a6a8-7c1460fae358 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.381219] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Created folder: Project (ef2aa3bc994a479e838e89fa7058ad64) in parent group-v587342. 
[ 965.381423] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Creating folder: Instances. Parent ref: group-v587548. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 965.381657] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7d4f7a9-3070-4cf5-ba27-e73cfebf7d21 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.390207] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Created folder: Instances in parent group-v587548. [ 965.390496] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 965.390614] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 965.390813] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38f68a27-ece9-478c-aeb0-645deda5f98a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.412262] env[69994]: DEBUG oslo_vmware.api [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925812, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159489} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.413304] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 965.413491] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 965.413667] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 965.413843] env[69994]: INFO nova.compute.manager [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Took 1.11 seconds to destroy the instance on the hypervisor. 
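The CopyVirtualDisk_Task, CloneVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task and CreateVM_Task entries above all follow the same oslo.vmware invoke-and-poll pattern: the driver invokes an asynchronous vSphere method through the API session, receives a Task managed-object reference, and wait_for_task() polls it (the "progress is N%" and "completed successfully" lines) until it finishes. A minimal sketch of that pattern follows; the host, credentials and VM moref are placeholders, not values taken from this run.

```python
# Minimal sketch of the invoke-and-poll pattern behind the *_Task entries
# above. Host, credentials and the VM moref are placeholders, not values
# taken from this log.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',  # host, username, password (placeholders)
    10,                                   # api_retry_count
    0.5)                                  # task_poll_interval (seconds)

# Build a managed-object reference for the VM (placeholder value).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Invoke an asynchronous vSphere method; the call returns a Task moref.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# wait_for_task() polls the task -- the "Task: {'id': ..., 'name': ...}
# progress is N%" lines -- and raises if the task ends in an error state.
session.wait_for_task(task)
```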
[ 965.414082] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 965.414265] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.414265] env[69994]: value = "task-2925817" [ 965.414265] env[69994]: _type = "Task" [ 965.414265] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.414438] env[69994]: DEBUG nova.compute.manager [-] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 965.414533] env[69994]: DEBUG nova.network.neutron [-] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 965.425132] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925817, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.437742] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925807, 'name': CloneVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.463657] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925813, 'name': Rename_Task, 'duration_secs': 0.146926} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.463926] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 965.464193] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-942120c7-7c19-4e75-939b-b17fd889c30e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.469438] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 965.469438] env[69994]: value = "task-2925818" [ 965.469438] env[69994]: _type = "Task" [ 965.469438] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.477656] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925818, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.536272] env[69994]: DEBUG oslo_vmware.api [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925814, 'name': PowerOffVM_Task, 'duration_secs': 0.253061} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.536551] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.536719] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.537014] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62ad7f9e-eddb-443e-a53a-d2b132a10da1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.597327] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.597575] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.597755] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Deleting the datastore file [datastore2] 565066c4-2f33-44c6-8e82-4c6d729cd0b7 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.598026] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1321d470-57bc-4eb4-91b1-414a3915e789 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.604549] env[69994]: DEBUG oslo_vmware.api [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for the task: (returnval){ [ 965.604549] env[69994]: value = "task-2925820" [ 965.604549] env[69994]: _type = "Task" [ 965.604549] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.613377] env[69994]: DEBUG oslo_vmware.api [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925820, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.704016] env[69994]: DEBUG nova.compute.utils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 965.705221] env[69994]: DEBUG nova.compute.manager [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 965.705552] env[69994]: DEBUG nova.network.neutron [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 965.717022] env[69994]: DEBUG nova.compute.manager [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 965.717022] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.717022] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0867bfa-d0b4-49f9-90a7-ce8c7943d4fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.725035] env[69994]: DEBUG oslo_vmware.api [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Waiting for the task: (returnval){ [ 965.725035] env[69994]: value = "task-2925821" [ 965.725035] env[69994]: _type = "Task" [ 965.725035] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.736031] env[69994]: DEBUG oslo_vmware.api [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925821, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.822870] env[69994]: DEBUG nova.policy [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '853dbd83e1504a9fb5c59663b9f66731', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '066bb6c96ed24678a7872c24f3ed8e68', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 965.841177] env[69994]: DEBUG nova.compute.manager [req-f227033a-5f6a-4a86-96ac-20de3f1e89ae req-b81bfe3f-d888-49c8-8390-68fa8e54fcac service nova] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Received event network-vif-deleted-5e15c09f-a8a4-4350-aaa1-705508f4deb7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 965.841177] env[69994]: INFO nova.compute.manager [req-f227033a-5f6a-4a86-96ac-20de3f1e89ae req-b81bfe3f-d888-49c8-8390-68fa8e54fcac service nova] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Neutron deleted interface 5e15c09f-a8a4-4350-aaa1-705508f4deb7; detaching it from the instance and deleting it from the info cache [ 965.841177] env[69994]: DEBUG nova.network.neutron [req-f227033a-5f6a-4a86-96ac-20de3f1e89ae req-b81bfe3f-d888-49c8-8390-68fa8e54fcac service nova] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.926631] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925817, 'name': CreateVM_Task, 'duration_secs': 0.367245} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.926812] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.927545] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.927716] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.928048] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 965.928305] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2d12da6-6451-496a-8728-6f8c46ed39ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.936327] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for the task: (returnval){ [ 965.936327] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e64e97-495f-9a8d-a7b3-21c2f1ea2895" [ 965.936327] env[69994]: _type = "Task" [ 965.936327] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.940626] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925807, 'name': CloneVM_Task, 'duration_secs': 2.02543} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.943603] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Created linked-clone VM from snapshot [ 965.944342] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a252bb-25a8-4a85-999f-2fda947753a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.951635] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e64e97-495f-9a8d-a7b3-21c2f1ea2895, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.954811] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Uploading image 70ddab2c-a0f6-4035-8a35-ac98d07d06e4 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 965.972018] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 965.972018] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-96c747ea-fe47-4759-a4b1-a4037ee9d3d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.980236] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925818, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.982095] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 965.982095] env[69994]: value = "task-2925822" [ 965.982095] env[69994]: _type = "Task" [ 965.982095] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.989136] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925822, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.118116] env[69994]: DEBUG oslo_vmware.api [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Task: {'id': task-2925820, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176894} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.118116] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.118116] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.118116] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.118116] env[69994]: INFO nova.compute.manager [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 966.118116] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 966.118116] env[69994]: DEBUG nova.compute.manager [-] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 966.118455] env[69994]: DEBUG nova.network.neutron [-] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.206563] env[69994]: DEBUG nova.compute.manager [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 966.239183] env[69994]: DEBUG oslo_vmware.api [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925821, 'name': PowerOffVM_Task, 'duration_secs': 0.195432} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.239490] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 966.239688] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 966.239881] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587410', 'volume_id': 'e1cb381a-6162-44bf-a51f-61502bd6cb59', 'name': 'volume-e1cb381a-6162-44bf-a51f-61502bd6cb59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1f0d79f-dc67-4cf9-816c-c451f20d65ca', 'attached_at': '', 'detached_at': '', 'volume_id': 'e1cb381a-6162-44bf-a51f-61502bd6cb59', 'serial': 'e1cb381a-6162-44bf-a51f-61502bd6cb59'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 966.240737] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b33320e-fac6-486d-a4c4-58af6d4d3c5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.270040] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd206660-07c9-4687-be5a-6004075ae0d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.282092] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab717621-2217-4a2d-bd62-3a59f5d3eaf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.301998] env[69994]: DEBUG nova.network.neutron [-] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.307534] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10508cb5-3381-4dc7-b2d9-1ab9cce47adb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.326429] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] The volume has not been displaced from its original location: [datastore2] volume-e1cb381a-6162-44bf-a51f-61502bd6cb59/volume-e1cb381a-6162-44bf-a51f-61502bd6cb59.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 966.333225] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Reconfiguring VM instance instance-00000028 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 966.337618] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-827cfaa4-15e0-4014-b8d5-93064f24cf32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.353711] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-06926aae-7d79-4fed-ac94-18480926986e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.361986] env[69994]: DEBUG oslo_vmware.api [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Waiting for the task: (returnval){ [ 966.361986] env[69994]: value = "task-2925823" [ 966.361986] env[69994]: _type = "Task" [ 966.361986] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.369757] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ccc26b2-fd5d-4cc4-bb00-8920a2264389 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.395802] env[69994]: DEBUG oslo_vmware.api [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925823, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.414526] env[69994]: DEBUG nova.compute.manager [req-f227033a-5f6a-4a86-96ac-20de3f1e89ae req-b81bfe3f-d888-49c8-8390-68fa8e54fcac service nova] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Detach interface failed, port_id=5e15c09f-a8a4-4350-aaa1-705508f4deb7, reason: Instance f2ae08e9-fbf3-49ab-8290-75f8a53d6030 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 966.418057] env[69994]: DEBUG nova.network.neutron [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Successfully created port: a2315274-4441-4952-9041-19b79c4a331a {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 966.453073] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e64e97-495f-9a8d-a7b3-21c2f1ea2895, 'name': SearchDatastore_Task, 'duration_secs': 0.015479} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.453417] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.453681] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.453961] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.454127] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.454325] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.454595] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-150d4c59-0278-4797-9201-1d153014d9a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.466515] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.466841] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 966.467844] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bba4c52b-b327-4860-98d0-bae4a2ce472b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.480306] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for the task: (returnval){ [ 966.480306] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52361b66-8124-6a4e-cc2f-046299321097" [ 966.480306] env[69994]: _type = "Task" [ 966.480306] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.489428] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925818, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.501325] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52361b66-8124-6a4e-cc2f-046299321097, 'name': SearchDatastore_Task, 'duration_secs': 0.01097} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.504029] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a27901ab-9fce-4c97-aa96-549cab1d6f47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.508837] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925822, 'name': Destroy_Task, 'duration_secs': 0.526621} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.509766] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Destroyed the VM [ 966.509766] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 966.509945] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-35626946-8cdf-4e92-a787-ea57f4e9787e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.513915] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for the task: (returnval){ [ 966.513915] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e2178e-87e1-77ea-57f7-a92f08251e4f" [ 966.513915] env[69994]: _type = "Task" [ 966.513915] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.521582] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 966.521582] env[69994]: value = "task-2925824" [ 966.521582] env[69994]: _type = "Task" [ 966.521582] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.527547] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e2178e-87e1-77ea-57f7-a92f08251e4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.535112] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925824, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.644550] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a14ee46-af83-439b-91df-9759a3d6217d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.652960] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f67d67a-03a2-4171-967b-4aa86b904546 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.684089] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7588c485-c570-421a-85b1-4bd32504033d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.697455] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39bc0a1f-98cd-42c8-a19d-7338098ec90f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.711691] env[69994]: DEBUG nova.compute.provider_tree [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.805223] env[69994]: INFO nova.compute.manager [-] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Took 1.39 seconds to deallocate network for instance. [ 966.873829] env[69994]: DEBUG oslo_vmware.api [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925823, 'name': ReconfigVM_Task, 'duration_secs': 0.239937} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.874418] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Reconfigured VM instance instance-00000028 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 966.879505] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-515e7ed6-ceaf-48af-b064-f08c7ddb1f3c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.896023] env[69994]: DEBUG oslo_vmware.api [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Waiting for the task: (returnval){ [ 966.896023] env[69994]: value = "task-2925825" [ 966.896023] env[69994]: _type = "Task" [ 966.896023] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.903630] env[69994]: DEBUG oslo_vmware.api [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925825, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.982341] env[69994]: DEBUG nova.network.neutron [-] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.983546] env[69994]: DEBUG oslo_vmware.api [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925818, 'name': PowerOnVM_Task, 'duration_secs': 1.036435} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.983985] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 966.984199] env[69994]: INFO nova.compute.manager [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Took 9.55 seconds to spawn the instance on the hypervisor. [ 966.984386] env[69994]: DEBUG nova.compute.manager [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 966.985134] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c8162c-e958-4cd2-b610-8e9971442639 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.032538] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e2178e-87e1-77ea-57f7-a92f08251e4f, 'name': SearchDatastore_Task, 'duration_secs': 0.016528} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.032538] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.032617] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e1c00159-d198-4858-b5a3-aa05152b1fda/e1c00159-d198-4858-b5a3-aa05152b1fda.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.033166] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aea01142-c8a2-4cfb-85bb-b41f26eaa10d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.039043] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925824, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.044238] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for the task: (returnval){ [ 967.044238] env[69994]: value = "task-2925826" [ 967.044238] env[69994]: _type = "Task" [ 967.044238] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.052962] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2925826, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.217517] env[69994]: DEBUG nova.scheduler.client.report [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 967.222145] env[69994]: DEBUG nova.compute.manager [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 967.256151] env[69994]: DEBUG nova.virt.hardware [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 967.256151] env[69994]: DEBUG nova.virt.hardware [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 967.256151] env[69994]: DEBUG nova.virt.hardware [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 967.256151] env[69994]: DEBUG nova.virt.hardware [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 967.256151] env[69994]: DEBUG nova.virt.hardware [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 967.256151] env[69994]: DEBUG nova.virt.hardware [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 967.256151] env[69994]: DEBUG nova.virt.hardware [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 967.256421] env[69994]: DEBUG nova.virt.hardware [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 967.256588] env[69994]: DEBUG nova.virt.hardware [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 967.256688] env[69994]: DEBUG nova.virt.hardware [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 967.256822] env[69994]: DEBUG nova.virt.hardware [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 967.257894] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a2b307-cf6a-4982-b07b-3a201bbc5573 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.267411] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee664919-1365-470e-8bdb-f96b6a0b7d6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.311876] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.407516] env[69994]: DEBUG oslo_vmware.api [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925825, 'name': ReconfigVM_Task, 'duration_secs': 0.186068} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.407973] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587410', 'volume_id': 'e1cb381a-6162-44bf-a51f-61502bd6cb59', 'name': 'volume-e1cb381a-6162-44bf-a51f-61502bd6cb59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1f0d79f-dc67-4cf9-816c-c451f20d65ca', 'attached_at': '', 'detached_at': '', 'volume_id': 'e1cb381a-6162-44bf-a51f-61502bd6cb59', 'serial': 'e1cb381a-6162-44bf-a51f-61502bd6cb59'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 967.408238] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 967.409295] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc705107-4e2e-4101-9652-437c539859c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.416821] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 967.417155] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce22bf36-dada-41a2-b5ed-1a29fd506d5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.485320] env[69994]: INFO nova.compute.manager [-] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Took 1.37 seconds to deallocate network for instance. 
[ 967.503238] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 967.503238] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 967.503506] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Deleting the datastore file [datastore2] f1f0d79f-dc67-4cf9-816c-c451f20d65ca {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 967.505663] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c58cb0d8-49d1-437a-83af-21a51503ffaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.508827] env[69994]: INFO nova.compute.manager [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Took 27.06 seconds to build instance. [ 967.515426] env[69994]: DEBUG oslo_vmware.api [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Waiting for the task: (returnval){ [ 967.515426] env[69994]: value = "task-2925828" [ 967.515426] env[69994]: _type = "Task" [ 967.515426] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.525186] env[69994]: DEBUG oslo_vmware.api [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925828, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.536160] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925824, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.554167] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2925826, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.726075] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.726075] env[69994]: DEBUG nova.compute.manager [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 967.728731] env[69994]: DEBUG oslo_concurrency.lockutils [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 10.493s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.728955] env[69994]: DEBUG nova.objects.instance [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 967.940135] env[69994]: DEBUG nova.compute.manager [req-7edb7dc0-2cc3-4b72-8c66-4af30c388e77 req-47d88f7b-7f28-494c-acbf-bd23a25c5a5e service nova] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Received event network-vif-deleted-2c8ad636-498e-4d08-8915-5d11ff684a84 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 967.993785] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.010996] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad5f922b-41ba-49d0-99cf-6f517f4cf7df tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "e03bc64f-70e9-4097-a1e1-ebf8f86508ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.567s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.026382] env[69994]: DEBUG oslo_vmware.api [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Task: {'id': task-2925828, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.252964} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.029621] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 968.029773] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 968.033018] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 968.033018] env[69994]: INFO nova.compute.manager [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Took 2.32 seconds to destroy the instance on the hypervisor. [ 968.033018] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 968.033018] env[69994]: DEBUG nova.compute.manager [-] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 968.033018] env[69994]: DEBUG nova.network.neutron [-] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 968.037088] env[69994]: DEBUG oslo_vmware.api [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925824, 'name': RemoveSnapshot_Task, 'duration_secs': 1.486578} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.037338] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 968.053382] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2925826, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531909} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.054632] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e1c00159-d198-4858-b5a3-aa05152b1fda/e1c00159-d198-4858-b5a3-aa05152b1fda.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 968.054632] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 968.054632] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c343f56d-7877-4c7e-88a6-aaa3c6a644ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.060111] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for the task: (returnval){ [ 968.060111] env[69994]: value = "task-2925829" [ 968.060111] env[69994]: _type = "Task" [ 968.060111] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.068778] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2925829, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.223940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "e03bc64f-70e9-4097-a1e1-ebf8f86508ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.223940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "e03bc64f-70e9-4097-a1e1-ebf8f86508ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.223940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "e03bc64f-70e9-4097-a1e1-ebf8f86508ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.223940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "e03bc64f-70e9-4097-a1e1-ebf8f86508ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.223940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "e03bc64f-70e9-4097-a1e1-ebf8f86508ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.224692] env[69994]: INFO nova.compute.manager [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Terminating instance [ 968.233447] env[69994]: DEBUG nova.compute.utils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 968.237603] env[69994]: DEBUG nova.compute.manager [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 968.237884] env[69994]: DEBUG nova.network.neutron [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 968.325151] env[69994]: DEBUG nova.policy [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '853dbd83e1504a9fb5c59663b9f66731', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '066bb6c96ed24678a7872c24f3ed8e68', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 968.497239] env[69994]: DEBUG nova.network.neutron [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Successfully updated port: a2315274-4441-4952-9041-19b79c4a331a {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 968.542438] env[69994]: WARNING nova.compute.manager [None req-5f596095-6cc0-4b28-a8e4-98c24c72aa7e tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Image not found during snapshot: nova.exception.ImageNotFound: Image 70ddab2c-a0f6-4035-8a35-ac98d07d06e4 could not be found. [ 968.570800] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2925829, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066142} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.571078] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 968.571846] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5268bb7-8fe9-48b8-9f3e-7631c799a974 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.594214] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] e1c00159-d198-4858-b5a3-aa05152b1fda/e1c00159-d198-4858-b5a3-aa05152b1fda.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 968.594513] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65d0a647-38a3-4533-9346-9b872ab8ddb5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.614869] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for the task: (returnval){ [ 968.614869] env[69994]: value = "task-2925830" [ 968.614869] env[69994]: _type = "Task" [ 968.614869] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.623684] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2925830, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.690464] env[69994]: DEBUG nova.network.neutron [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Successfully created port: 93ff6adf-86c8-4337-bed8-32c58f7afb15 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 968.730085] env[69994]: DEBUG nova.compute.manager [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 968.730360] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.731340] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a9c0d3-e729-4636-9582-847b4aa0c3c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.739581] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.739811] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcd7487a-70e3-4433-8397-9f8f6ac1789e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.742178] env[69994]: DEBUG nova.compute.manager [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 968.746253] env[69994]: DEBUG oslo_concurrency.lockutils [None req-505e94a9-866d-4aa6-9d2f-b90aa7d694f5 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.747848] env[69994]: DEBUG oslo_concurrency.lockutils [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 10.881s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.748077] env[69994]: DEBUG nova.objects.instance [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] [instance: 15595947-b944-4c82-90ae-883ed951c909] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 968.755687] env[69994]: DEBUG oslo_vmware.api [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 968.755687] env[69994]: value = "task-2925831" [ 968.755687] env[69994]: _type = "Task" [ 968.755687] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.765410] env[69994]: DEBUG oslo_vmware.api [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925831, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.003038] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "refresh_cache-76dbf172-10b2-4439-9d2a-8226ba46062d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.003367] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquired lock "refresh_cache-76dbf172-10b2-4439-9d2a-8226ba46062d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.004758] env[69994]: DEBUG nova.network.neutron [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 969.131113] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2925830, 'name': ReconfigVM_Task, 'duration_secs': 0.266616} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.134234] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Reconfigured VM instance instance-0000004b to attach disk [datastore1] e1c00159-d198-4858-b5a3-aa05152b1fda/e1c00159-d198-4858-b5a3-aa05152b1fda.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 969.135430] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23b70733-3af5-4bb5-ac6a-18e5437490ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.143498] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for the task: (returnval){ [ 969.143498] env[69994]: value = "task-2925832" [ 969.143498] env[69994]: _type = "Task" [ 969.143498] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.152776] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2925832, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.265401] env[69994]: DEBUG oslo_vmware.api [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925831, 'name': PowerOffVM_Task, 'duration_secs': 0.213233} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.266063] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 969.266244] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 969.266783] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a8b08f8-179a-45c2-b47f-5c8918a18511 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.290787] env[69994]: DEBUG nova.network.neutron [-] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.330400] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.331192] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.331192] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleting the datastore file [datastore1] e03bc64f-70e9-4097-a1e1-ebf8f86508ed {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.331568] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f994fcc4-eead-4311-a446-f08c36a87ad3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.338708] env[69994]: DEBUG oslo_vmware.api [None 
req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 969.338708] env[69994]: value = "task-2925834" [ 969.338708] env[69994]: _type = "Task" [ 969.338708] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.347356] env[69994]: DEBUG oslo_vmware.api [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925834, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.549647] env[69994]: DEBUG nova.network.neutron [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 969.562235] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Acquiring lock "c06a2540-e77d-48c0-967f-94e2a53c4d8f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.562517] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Lock "c06a2540-e77d-48c0-967f-94e2a53c4d8f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.562743] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Acquiring lock "c06a2540-e77d-48c0-967f-94e2a53c4d8f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.562947] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Lock "c06a2540-e77d-48c0-967f-94e2a53c4d8f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.563116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Lock "c06a2540-e77d-48c0-967f-94e2a53c4d8f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.567206] env[69994]: INFO nova.compute.manager [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 
tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Terminating instance [ 969.636299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "ee68a538-d803-4bd6-9117-b021b28da899" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.636554] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "ee68a538-d803-4bd6-9117-b021b28da899" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.636763] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "ee68a538-d803-4bd6-9117-b021b28da899-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.636946] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "ee68a538-d803-4bd6-9117-b021b28da899-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.637168] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "ee68a538-d803-4bd6-9117-b021b28da899-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.639110] env[69994]: INFO nova.compute.manager [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Terminating instance [ 969.654419] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2925832, 'name': Rename_Task, 'duration_secs': 0.146504} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.656626] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 969.656869] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c9babbb-cfa1-443f-b633-dd20b63beded {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.663852] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for the task: (returnval){ [ 969.663852] env[69994]: value = "task-2925835" [ 969.663852] env[69994]: _type = "Task" [ 969.663852] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.671414] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2925835, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.685752] env[69994]: DEBUG nova.network.neutron [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Updating instance_info_cache with network_info: [{"id": "a2315274-4441-4952-9041-19b79c4a331a", "address": "fa:16:3e:15:8a:a7", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.123", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2315274-44", "ovs_interfaceid": "a2315274-4441-4952-9041-19b79c4a331a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.757372] env[69994]: DEBUG nova.compute.manager [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 969.760670] env[69994]: DEBUG oslo_concurrency.lockutils [None req-15b12a6e-0d52-4cf4-bdec-90d0568a735b tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.761801] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.110s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.763180] env[69994]: INFO nova.compute.claims [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 969.788509] env[69994]: DEBUG nova.virt.hardware [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 969.788756] env[69994]: DEBUG nova.virt.hardware [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.788908] env[69994]: DEBUG nova.virt.hardware [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 969.789151] env[69994]: DEBUG nova.virt.hardware [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.789305] env[69994]: DEBUG nova.virt.hardware [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 969.789455] env[69994]: DEBUG nova.virt.hardware [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 969.789666] env[69994]: DEBUG nova.virt.hardware [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 969.789822] env[69994]: DEBUG nova.virt.hardware [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 969.790014] env[69994]: DEBUG nova.virt.hardware [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 969.790214] env[69994]: DEBUG nova.virt.hardware [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 969.790391] env[69994]: DEBUG nova.virt.hardware [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 969.791493] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f67d1f4-a1ef-457f-a5a1-e4c715b94777 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.795194] env[69994]: INFO nova.compute.manager [-] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Took 1.76 seconds to deallocate network for instance. [ 969.803078] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-547e1ebe-53e1-48bc-844d-9d322a5375a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.849804] env[69994]: DEBUG oslo_vmware.api [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925834, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152988} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.849804] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.849804] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.850042] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.850250] env[69994]: INFO nova.compute.manager [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Took 1.12 seconds to destroy the instance on the hypervisor. [ 969.850515] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 969.850730] env[69994]: DEBUG nova.compute.manager [-] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 969.850842] env[69994]: DEBUG nova.network.neutron [-] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.971653] env[69994]: DEBUG nova.compute.manager [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Received event network-vif-plugged-a2315274-4441-4952-9041-19b79c4a331a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 969.971908] env[69994]: DEBUG oslo_concurrency.lockutils [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] Acquiring lock "76dbf172-10b2-4439-9d2a-8226ba46062d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.972160] env[69994]: DEBUG oslo_concurrency.lockutils [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] Lock "76dbf172-10b2-4439-9d2a-8226ba46062d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.972362] env[69994]: DEBUG oslo_concurrency.lockutils [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] Lock "76dbf172-10b2-4439-9d2a-8226ba46062d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.972551] env[69994]: DEBUG nova.compute.manager [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] No waiting events found dispatching network-vif-plugged-a2315274-4441-4952-9041-19b79c4a331a {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 969.972747] env[69994]: WARNING nova.compute.manager [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Received unexpected event network-vif-plugged-a2315274-4441-4952-9041-19b79c4a331a for instance with vm_state building and task_state spawning. [ 969.972929] env[69994]: DEBUG nova.compute.manager [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Received event network-changed-a2315274-4441-4952-9041-19b79c4a331a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 969.973148] env[69994]: DEBUG nova.compute.manager [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Refreshing instance network info cache due to event network-changed-a2315274-4441-4952-9041-19b79c4a331a. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 969.973303] env[69994]: DEBUG oslo_concurrency.lockutils [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] Acquiring lock "refresh_cache-76dbf172-10b2-4439-9d2a-8226ba46062d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.070989] env[69994]: DEBUG nova.compute.manager [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 970.071173] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 970.072103] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11713ff-9713-4e56-8866-b5397a1d7259 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.080487] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 970.080762] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f9381f1-55de-4cfc-ba58-fbc1fa14f75b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.087163] env[69994]: DEBUG oslo_vmware.api [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Waiting for the task: (returnval){ [ 970.087163] env[69994]: value = "task-2925836" [ 970.087163] env[69994]: _type = "Task" [ 970.087163] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.094990] env[69994]: DEBUG oslo_vmware.api [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925836, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.142860] env[69994]: DEBUG nova.compute.manager [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 970.143135] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 970.144743] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f90834-b10e-4df7-bfa5-5c6bb77ea777 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.153321] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 970.153321] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77fe6b3c-5f31-4c28-990d-c5238f95adc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.160410] env[69994]: DEBUG oslo_vmware.api [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 970.160410] env[69994]: value = "task-2925837" [ 970.160410] env[69994]: _type = "Task" [ 970.160410] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.170613] env[69994]: DEBUG oslo_vmware.api [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925837, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.176498] env[69994]: DEBUG oslo_vmware.api [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2925835, 'name': PowerOnVM_Task, 'duration_secs': 0.446229} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.176796] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 970.177010] env[69994]: INFO nova.compute.manager [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Took 8.32 seconds to spawn the instance on the hypervisor. 
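Editor's note: the PowerOnVM_Task / PowerOffVM_Task entries above, together with the "Waiting for the task", "_poll_task ... progress is N%" and "completed successfully" records, follow oslo.vmware's standard invoke-and-poll pattern. The sketch below is illustrative only and is not the Nova driver's code; the vCenter endpoint, credentials, and the 'vm-12345' managed object reference are placeholder assumptions, not values taken from this log.

    from oslo_vmware import api
    from oslo_vmware import vim_util

    # Open a vSphere API session (placeholder endpoint and credentials).
    session = api.VMwareAPISession(
        'vcenter.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed object reference for the VM (placeholder moref value).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start the vSphere task; this corresponds to the
    # "Invoking VirtualMachine.PowerOnVM_Task" request_handler entries above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Block until the task finishes; wait_for_task polls the task object and
    # reports progress, which is what the "_poll_task ... progress is 0%" and
    # "completed successfully" entries reflect.
    session.wait_for_task(task)

The power-off, unregister, and datastore file deletion steps traced in this section use the same invoke-and-poll shape, only with 'PowerOffVM_Task', 'UnregisterVM', and the FileManager's 'DeleteDatastoreFile_Task'.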
[ 970.177208] env[69994]: DEBUG nova.compute.manager [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 970.178024] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc0a3ec-11d2-4c62-b6d2-cb5b007cc946 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.190971] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Releasing lock "refresh_cache-76dbf172-10b2-4439-9d2a-8226ba46062d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.190971] env[69994]: DEBUG nova.compute.manager [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Instance network_info: |[{"id": "a2315274-4441-4952-9041-19b79c4a331a", "address": "fa:16:3e:15:8a:a7", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.123", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2315274-44", "ovs_interfaceid": "a2315274-4441-4952-9041-19b79c4a331a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 970.190971] env[69994]: DEBUG oslo_concurrency.lockutils [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] Acquired lock "refresh_cache-76dbf172-10b2-4439-9d2a-8226ba46062d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.190971] env[69994]: DEBUG nova.network.neutron [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Refreshing network info cache for port a2315274-4441-4952-9041-19b79c4a331a {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 970.190971] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:8a:a7', 'network_ref': 
{'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2315274-4441-4952-9041-19b79c4a331a', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 970.200339] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Creating folder: Project (066bb6c96ed24678a7872c24f3ed8e68). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 970.200339] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7123477-b98a-4b5d-9530-1e058d733acd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.212183] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Created folder: Project (066bb6c96ed24678a7872c24f3ed8e68) in parent group-v587342. [ 970.212183] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Creating folder: Instances. Parent ref: group-v587551. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 970.212183] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac766400-ac49-4889-80bc-c8dd489c81df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.221798] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Created folder: Instances in parent group-v587551. [ 970.222020] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 970.222197] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 970.223058] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6008fef-e03a-4fd5-bf5a-97c1d0a0165e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.244374] env[69994]: DEBUG nova.compute.manager [req-3a9a73f4-2f7c-4226-8c5c-ee61b4c78612 req-d7e690d6-d974-4d4a-8503-ef2535bee8bb service nova] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Received event network-vif-plugged-93ff6adf-86c8-4337-bed8-32c58f7afb15 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 970.244374] env[69994]: DEBUG oslo_concurrency.lockutils [req-3a9a73f4-2f7c-4226-8c5c-ee61b4c78612 req-d7e690d6-d974-4d4a-8503-ef2535bee8bb service nova] Acquiring lock "c98308b3-2431-4f17-9022-bcd9f1e83a35-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.244374] env[69994]: DEBUG oslo_concurrency.lockutils [req-3a9a73f4-2f7c-4226-8c5c-ee61b4c78612 req-d7e690d6-d974-4d4a-8503-ef2535bee8bb service nova] Lock "c98308b3-2431-4f17-9022-bcd9f1e83a35-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.244592] env[69994]: DEBUG oslo_concurrency.lockutils [req-3a9a73f4-2f7c-4226-8c5c-ee61b4c78612 req-d7e690d6-d974-4d4a-8503-ef2535bee8bb service nova] Lock "c98308b3-2431-4f17-9022-bcd9f1e83a35-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.244592] env[69994]: DEBUG nova.compute.manager [req-3a9a73f4-2f7c-4226-8c5c-ee61b4c78612 req-d7e690d6-d974-4d4a-8503-ef2535bee8bb service nova] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] No waiting events found dispatching network-vif-plugged-93ff6adf-86c8-4337-bed8-32c58f7afb15 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 970.244707] env[69994]: WARNING nova.compute.manager [req-3a9a73f4-2f7c-4226-8c5c-ee61b4c78612 req-d7e690d6-d974-4d4a-8503-ef2535bee8bb service nova] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Received unexpected event network-vif-plugged-93ff6adf-86c8-4337-bed8-32c58f7afb15 for instance with vm_state building and task_state spawning. [ 970.251304] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 970.251304] env[69994]: value = "task-2925840" [ 970.251304] env[69994]: _type = "Task" [ 970.251304] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.262402] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925840, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.346721] env[69994]: INFO nova.compute.manager [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Took 0.55 seconds to detach 1 volumes for instance. [ 970.349036] env[69994]: DEBUG nova.compute.manager [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Deleting volume: e1cb381a-6162-44bf-a51f-61502bd6cb59 {{(pid=69994) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 970.375631] env[69994]: DEBUG nova.network.neutron [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Successfully updated port: 93ff6adf-86c8-4337-bed8-32c58f7afb15 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 970.598151] env[69994]: DEBUG oslo_vmware.api [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925836, 'name': PowerOffVM_Task, 'duration_secs': 0.269017} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.598449] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 970.600046] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 970.600046] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72724bae-57fa-4170-83e3-d4f40fcd181f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.638213] env[69994]: DEBUG nova.network.neutron [-] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.671527] env[69994]: DEBUG oslo_vmware.api [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925837, 'name': PowerOffVM_Task, 'duration_secs': 0.182807} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.672940] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 970.673161] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 970.673491] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 970.673690] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 970.673868] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Deleting the datastore file [datastore2] c06a2540-e77d-48c0-967f-94e2a53c4d8f {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 970.674139] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6545d675-53d7-426c-ba6e-02dc503603fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.675748] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80c5225b-17a8-4df1-bd0e-b705348cca18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.681345] env[69994]: DEBUG oslo_vmware.api [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Waiting for the task: (returnval){ [ 970.681345] env[69994]: value = "task-2925844" [ 970.681345] env[69994]: _type = "Task" [ 970.681345] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.693837] env[69994]: DEBUG oslo_vmware.api [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925844, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.698754] env[69994]: INFO nova.compute.manager [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Took 28.91 seconds to build instance. 
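Editor's note: the repeated 'Acquiring lock "..." by "..."', 'acquired ... :: waited N.NNNs', and '"released" ... :: held N.NNNs' records in this section come from oslo.concurrency's lockutils wrappers. A minimal sketch of that pattern follows, under stated assumptions: the decorated function and the UUID below are hypothetical stand-ins, not real Nova methods or instances; only the lock names mirror the "compute_resources" and "refresh_cache-..." locks seen above.

    from oslo_concurrency import lockutils

    # Nova-style helper: prefix every lock name this service creates.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs only while the "compute_resources" lock is held; the wrapper logs
        # the acquire ("waited N.NNNs") and release ("held N.NNNs") DEBUG lines
        # around this body, as seen for ResourceTracker methods above.
        return instance_uuid

    # The same facility can be used directly as a context manager, which is what
    # produces the paired 'Acquiring lock "refresh_cache-..."' / 'Acquired lock'
    # entries around the network info cache refreshes in this section.
    with lockutils.lock('refresh_cache-00000000-0000-0000-0000-000000000000'):
        pass

The long "waited" values in the log (e.g. roughly 9 to 11 seconds for "compute_resources") indicate contention on these named locks rather than slow vSphere calls, since the wait is measured before the protected body runs.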
[ 970.736306] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 970.736543] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 970.736811] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleting the datastore file [datastore2] ee68a538-d803-4bd6-9117-b021b28da899 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 970.737419] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0caa66c7-95d1-401c-8b29-daad930827d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.744420] env[69994]: DEBUG oslo_vmware.api [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for the task: (returnval){ [ 970.744420] env[69994]: value = "task-2925845" [ 970.744420] env[69994]: _type = "Task" [ 970.744420] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.752670] env[69994]: DEBUG oslo_vmware.api [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925845, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.761185] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925840, 'name': CreateVM_Task, 'duration_secs': 0.429658} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.761479] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 970.762085] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.762280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.762624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 970.762889] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86aea0db-f417-448e-ad73-8b0f3b1169d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.767399] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 970.767399] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5298112b-5631-28da-c1a5-a0f1dcf132b9" [ 970.767399] env[69994]: _type = "Task" [ 970.767399] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.780869] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5298112b-5631-28da-c1a5-a0f1dcf132b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.880393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "refresh_cache-c98308b3-2431-4f17-9022-bcd9f1e83a35" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.880393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquired lock "refresh_cache-c98308b3-2431-4f17-9022-bcd9f1e83a35" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.880393] env[69994]: DEBUG nova.network.neutron [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 970.904949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.915922] env[69994]: DEBUG nova.network.neutron [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Updated VIF entry in instance network info cache for port a2315274-4441-4952-9041-19b79c4a331a. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 970.915922] env[69994]: DEBUG nova.network.neutron [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Updating instance_info_cache with network_info: [{"id": "a2315274-4441-4952-9041-19b79c4a331a", "address": "fa:16:3e:15:8a:a7", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.123", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2315274-44", "ovs_interfaceid": "a2315274-4441-4952-9041-19b79c4a331a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.142441] env[69994]: INFO nova.compute.manager [-] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Took 1.29 seconds to deallocate network for instance. [ 971.146266] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e77187b-ecaf-4b2e-b29c-424dd2b119cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.159981] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c98259-2e7d-4125-a2dd-d2282c522873 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.198230] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe0a5f7-dce3-4e15-9fe4-c88703bf0c1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.200863] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccca4d9a-ea53-4432-ae80-249c544797a3 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Lock "e1c00159-d198-4858-b5a3-aa05152b1fda" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.421s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.208255] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627085dd-73ba-45d5-9c68-291d6f1bec5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.212781] env[69994]: DEBUG oslo_vmware.api [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Task: {'id': task-2925844, 'name': DeleteDatastoreFile_Task, 
'duration_secs': 0.155794} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.213057] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 971.213254] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 971.213431] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 971.213601] env[69994]: INFO nova.compute.manager [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 971.213843] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 971.214496] env[69994]: DEBUG nova.compute.manager [-] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 971.217173] env[69994]: DEBUG nova.network.neutron [-] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 971.226266] env[69994]: DEBUG nova.compute.provider_tree [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.254387] env[69994]: DEBUG oslo_vmware.api [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Task: {'id': task-2925845, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143666} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.254666] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 971.254853] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 971.255037] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 971.255222] env[69994]: INFO nova.compute.manager [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Took 1.11 seconds to destroy the instance on the hypervisor. [ 971.255488] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 971.255664] env[69994]: DEBUG nova.compute.manager [-] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 971.255828] env[69994]: DEBUG nova.network.neutron [-] [instance: ee68a538-d803-4bd6-9117-b021b28da899] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 971.279145] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5298112b-5631-28da-c1a5-a0f1dcf132b9, 'name': SearchDatastore_Task, 'duration_secs': 0.01777} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.279486] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.279721] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 971.279955] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.280113] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.280297] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 971.280569] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1eecfa8-34e6-4d12-a6a5-7dc9a8788eef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.301932] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 971.302140] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 971.302878] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45f33a2f-9cfc-4de1-bb05-b25594513a55 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.308453] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 971.308453] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521ea2ce-4d56-5f50-0512-eabc2b88c9e2" [ 971.308453] env[69994]: _type = "Task" [ 971.308453] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.316022] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521ea2ce-4d56-5f50-0512-eabc2b88c9e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.418399] env[69994]: DEBUG oslo_concurrency.lockutils [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] Releasing lock "refresh_cache-76dbf172-10b2-4439-9d2a-8226ba46062d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.418858] env[69994]: DEBUG nova.compute.manager [req-429c3b7a-1831-4d32-b2ab-79740431e599 req-5a942549-dd1a-4ce1-8cd2-4bf929140ea8 service nova] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Received event network-vif-deleted-ecb02147-aeb7-4256-9ce1-e20d727853b4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 971.446114] env[69994]: DEBUG nova.network.neutron [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 971.654235] env[69994]: DEBUG oslo_concurrency.lockutils [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.685204] env[69994]: DEBUG nova.network.neutron [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Updating instance_info_cache with network_info: [{"id": "93ff6adf-86c8-4337-bed8-32c58f7afb15", "address": "fa:16:3e:8f:62:e2", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.118", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93ff6adf-86", "ovs_interfaceid": "93ff6adf-86c8-4337-bed8-32c58f7afb15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.730073] env[69994]: DEBUG nova.scheduler.client.report [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 971.819603] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521ea2ce-4d56-5f50-0512-eabc2b88c9e2, 'name': SearchDatastore_Task, 'duration_secs': 0.019816} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.820706] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00800347-010c-44c2-a979-15925d3a5406 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.826656] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 971.826656] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a8a6a-31d8-3748-7551-504ae323d303" [ 971.826656] env[69994]: _type = "Task" [ 971.826656] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.834101] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a8a6a-31d8-3748-7551-504ae323d303, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.004187] env[69994]: DEBUG nova.compute.manager [req-fb1738af-9974-4844-88d7-89cb35bca788 req-e5fdc4a7-09d2-49ef-b692-7b1399f374d8 service nova] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Received event network-vif-deleted-2fdbcd8a-f377-4efa-8ebf-bbadae9adfdd {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 972.004187] env[69994]: DEBUG nova.compute.manager [req-fb1738af-9974-4844-88d7-89cb35bca788 req-e5fdc4a7-09d2-49ef-b692-7b1399f374d8 service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Received event network-changed-3d8fb179-d40f-4e18-8089-07f61c108080 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 972.004187] env[69994]: DEBUG nova.compute.manager [req-fb1738af-9974-4844-88d7-89cb35bca788 req-e5fdc4a7-09d2-49ef-b692-7b1399f374d8 service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Refreshing instance network info cache due to event network-changed-3d8fb179-d40f-4e18-8089-07f61c108080. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 972.004187] env[69994]: DEBUG oslo_concurrency.lockutils [req-fb1738af-9974-4844-88d7-89cb35bca788 req-e5fdc4a7-09d2-49ef-b692-7b1399f374d8 service nova] Acquiring lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.004187] env[69994]: DEBUG oslo_concurrency.lockutils [req-fb1738af-9974-4844-88d7-89cb35bca788 req-e5fdc4a7-09d2-49ef-b692-7b1399f374d8 service nova] Acquired lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 972.004428] env[69994]: DEBUG nova.network.neutron [req-fb1738af-9974-4844-88d7-89cb35bca788 req-e5fdc4a7-09d2-49ef-b692-7b1399f374d8 service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Refreshing network info cache for port 3d8fb179-d40f-4e18-8089-07f61c108080 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 972.167912] env[69994]: DEBUG nova.network.neutron [-] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.185953] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Releasing lock "refresh_cache-c98308b3-2431-4f17-9022-bcd9f1e83a35" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.186282] env[69994]: DEBUG nova.compute.manager [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Instance network_info: |[{"id": "93ff6adf-86c8-4337-bed8-32c58f7afb15", "address": "fa:16:3e:8f:62:e2", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.118", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93ff6adf-86", "ovs_interfaceid": "93ff6adf-86c8-4337-bed8-32c58f7afb15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 972.186926] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:8f:62:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93ff6adf-86c8-4337-bed8-32c58f7afb15', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 972.194454] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 972.194680] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 972.195173] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8fa2940-3759-46dd-8f00-ba0fbeded3bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.217887] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 972.217887] env[69994]: value = "task-2925846" [ 972.217887] env[69994]: _type = "Task" [ 972.217887] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.225492] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925846, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.243621] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.482s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.244175] env[69994]: DEBUG nova.compute.manager [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 972.247020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.018s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.247269] env[69994]: DEBUG nova.objects.instance [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Lazy-loading 'resources' on Instance uuid 1e19dc4d-c3dd-41e7-819f-30d54cb1390e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.276938] env[69994]: DEBUG nova.compute.manager [req-cd6fa513-8058-4f37-ae56-e4b9d5fb6060 req-70c5dc95-0526-40b0-908a-1ab6a2e4c2ae service nova] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Received event network-changed-93ff6adf-86c8-4337-bed8-32c58f7afb15 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 972.277214] env[69994]: DEBUG nova.compute.manager [req-cd6fa513-8058-4f37-ae56-e4b9d5fb6060 req-70c5dc95-0526-40b0-908a-1ab6a2e4c2ae service nova] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Refreshing instance network info cache due to event network-changed-93ff6adf-86c8-4337-bed8-32c58f7afb15. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 972.277442] env[69994]: DEBUG oslo_concurrency.lockutils [req-cd6fa513-8058-4f37-ae56-e4b9d5fb6060 req-70c5dc95-0526-40b0-908a-1ab6a2e4c2ae service nova] Acquiring lock "refresh_cache-c98308b3-2431-4f17-9022-bcd9f1e83a35" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.277589] env[69994]: DEBUG oslo_concurrency.lockutils [req-cd6fa513-8058-4f37-ae56-e4b9d5fb6060 req-70c5dc95-0526-40b0-908a-1ab6a2e4c2ae service nova] Acquired lock "refresh_cache-c98308b3-2431-4f17-9022-bcd9f1e83a35" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 972.277751] env[69994]: DEBUG nova.network.neutron [req-cd6fa513-8058-4f37-ae56-e4b9d5fb6060 req-70c5dc95-0526-40b0-908a-1ab6a2e4c2ae service nova] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Refreshing network info cache for port 93ff6adf-86c8-4337-bed8-32c58f7afb15 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 972.291650] env[69994]: DEBUG nova.network.neutron [-] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.338407] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a8a6a-31d8-3748-7551-504ae323d303, 'name': SearchDatastore_Task, 'duration_secs': 0.009813} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.338909] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.341702] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 76dbf172-10b2-4439-9d2a-8226ba46062d/76dbf172-10b2-4439-9d2a-8226ba46062d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 972.341702] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f0bcd3d-6fd1-4b17-a06d-ffe2c8da1101 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.346064] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 972.346064] env[69994]: value = "task-2925847" [ 972.346064] env[69994]: _type = "Task" [ 972.346064] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.353685] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925847, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.670983] env[69994]: INFO nova.compute.manager [-] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Took 1.41 seconds to deallocate network for instance. [ 972.730301] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925846, 'name': CreateVM_Task, 'duration_secs': 0.443265} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.733597] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 972.734640] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.734990] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 972.735385] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 972.735999] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2875dd9-884b-4c55-bfdc-cffbec97d2c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.741489] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 972.741489] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5201f42e-ef75-1dad-6ac0-ba4c95c61e03" [ 972.741489] env[69994]: _type = "Task" [ 972.741489] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.753106] env[69994]: DEBUG nova.compute.utils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 972.757055] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5201f42e-ef75-1dad-6ac0-ba4c95c61e03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.758103] env[69994]: DEBUG nova.compute.manager [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 972.758103] env[69994]: DEBUG nova.network.neutron [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 972.786107] env[69994]: DEBUG nova.network.neutron [req-fb1738af-9974-4844-88d7-89cb35bca788 req-e5fdc4a7-09d2-49ef-b692-7b1399f374d8 service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Updated VIF entry in instance network info cache for port 3d8fb179-d40f-4e18-8089-07f61c108080. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 972.786469] env[69994]: DEBUG nova.network.neutron [req-fb1738af-9974-4844-88d7-89cb35bca788 req-e5fdc4a7-09d2-49ef-b692-7b1399f374d8 service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Updating instance_info_cache with network_info: [{"id": "3d8fb179-d40f-4e18-8089-07f61c108080", "address": "fa:16:3e:a9:49:3e", "network": {"id": "132b6a5f-2a69-4b91-a418-959f72df76fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1585728291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef2aa3bc994a479e838e89fa7058ad64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d8fb179-d4", "ovs_interfaceid": "3d8fb179-d40f-4e18-8089-07f61c108080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.794552] env[69994]: INFO nova.compute.manager [-] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Took 1.58 seconds to deallocate network for instance. 
[ 972.811280] env[69994]: DEBUG nova.policy [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d6a1603506e4d48a9d2f8bf61475821', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f602778aac0d41c49e73c2450f31d711', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 972.860230] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925847, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446152} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.863439] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 76dbf172-10b2-4439-9d2a-8226ba46062d/76dbf172-10b2-4439-9d2a-8226ba46062d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 972.863750] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 972.864598] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95b1918f-1afd-4543-946a-8e6d908d4745 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.871263] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 972.871263] env[69994]: value = "task-2925848" [ 972.871263] env[69994]: _type = "Task" [ 972.871263] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.887187] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925848, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.089122] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9c1bbb-a9b6-4767-8535-179cd90438b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.097146] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f33278-84c0-4824-8861-e0d9f987535c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.129326] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b460e653-7e9f-401a-b7b4-69a6da5e0933 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.142407] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5fbc18c-bbc0-4aae-9414-69d013a544d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.158869] env[69994]: DEBUG nova.compute.provider_tree [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.178558] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.205282] env[69994]: DEBUG nova.network.neutron [req-cd6fa513-8058-4f37-ae56-e4b9d5fb6060 req-70c5dc95-0526-40b0-908a-1ab6a2e4c2ae service nova] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Updated VIF entry in instance network info cache for port 93ff6adf-86c8-4337-bed8-32c58f7afb15. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 973.205710] env[69994]: DEBUG nova.network.neutron [req-cd6fa513-8058-4f37-ae56-e4b9d5fb6060 req-70c5dc95-0526-40b0-908a-1ab6a2e4c2ae service nova] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Updating instance_info_cache with network_info: [{"id": "93ff6adf-86c8-4337-bed8-32c58f7afb15", "address": "fa:16:3e:8f:62:e2", "network": {"id": "31f5fe0f-3718-477d-8b81-2e77b136105e", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.118", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3930bfd42cfa44e8b0ca650284b8ab00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93ff6adf-86", "ovs_interfaceid": "93ff6adf-86c8-4337-bed8-32c58f7afb15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.256388] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5201f42e-ef75-1dad-6ac0-ba4c95c61e03, 'name': SearchDatastore_Task, 'duration_secs': 0.039564} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.256797] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.256797] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 973.257100] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.257563] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.257563] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 973.257981] env[69994]: DEBUG nova.compute.manager [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 973.260592] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a3e0d88-4477-46cb-8c27-d7a93155e16c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.275957] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 973.276189] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 973.277018] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccd7e39d-2d75-4258-9040-8ad43871fa78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.283385] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 973.283385] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5294ece8-019b-7f3d-07d7-d16223db16e0" [ 973.283385] env[69994]: _type = "Task" [ 973.283385] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.290946] env[69994]: DEBUG oslo_concurrency.lockutils [req-fb1738af-9974-4844-88d7-89cb35bca788 req-e5fdc4a7-09d2-49ef-b692-7b1399f374d8 service nova] Releasing lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.291693] env[69994]: DEBUG nova.compute.manager [req-fb1738af-9974-4844-88d7-89cb35bca788 req-e5fdc4a7-09d2-49ef-b692-7b1399f374d8 service nova] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Received event network-vif-deleted-43ef7f11-6496-44e9-a438-979f2407ad8b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 973.292032] env[69994]: INFO nova.compute.manager [req-fb1738af-9974-4844-88d7-89cb35bca788 req-e5fdc4a7-09d2-49ef-b692-7b1399f374d8 service nova] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Neutron deleted interface 43ef7f11-6496-44e9-a438-979f2407ad8b; detaching it from the instance and deleting it from the info cache [ 973.292137] env[69994]: DEBUG nova.network.neutron [req-fb1738af-9974-4844-88d7-89cb35bca788 req-e5fdc4a7-09d2-49ef-b692-7b1399f374d8 service nova] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.293278] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5294ece8-019b-7f3d-07d7-d16223db16e0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.305062] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.318426] env[69994]: DEBUG nova.network.neutron [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Successfully created port: 63725932-f447-4abb-a32e-2b3b30e8e79f {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 973.383267] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925848, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074203} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.383513] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 973.384336] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b346ad2b-d33a-4ccf-a193-3b10448f545f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.407839] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 76dbf172-10b2-4439-9d2a-8226ba46062d/76dbf172-10b2-4439-9d2a-8226ba46062d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 973.408149] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c026c6ae-c863-4c2b-b2a3-7ef7a57800a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.427995] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 973.427995] env[69994]: value = "task-2925849" [ 973.427995] env[69994]: _type = "Task" [ 973.427995] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.438721] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925849, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.662590] env[69994]: DEBUG nova.scheduler.client.report [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 973.709934] env[69994]: DEBUG oslo_concurrency.lockutils [req-cd6fa513-8058-4f37-ae56-e4b9d5fb6060 req-70c5dc95-0526-40b0-908a-1ab6a2e4c2ae service nova] Releasing lock "refresh_cache-c98308b3-2431-4f17-9022-bcd9f1e83a35" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.709934] env[69994]: DEBUG nova.compute.manager [req-cd6fa513-8058-4f37-ae56-e4b9d5fb6060 req-70c5dc95-0526-40b0-908a-1ab6a2e4c2ae service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Received event network-vif-deleted-20b9c01f-3830-45bc-82b7-4b7014586c1c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 973.709934] env[69994]: INFO nova.compute.manager [req-cd6fa513-8058-4f37-ae56-e4b9d5fb6060 req-70c5dc95-0526-40b0-908a-1ab6a2e4c2ae service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Neutron deleted interface 20b9c01f-3830-45bc-82b7-4b7014586c1c; detaching it from the instance and deleting it from the info cache [ 973.709934] env[69994]: DEBUG nova.network.neutron [req-cd6fa513-8058-4f37-ae56-e4b9d5fb6060 req-70c5dc95-0526-40b0-908a-1ab6a2e4c2ae service nova] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.795124] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5294ece8-019b-7f3d-07d7-d16223db16e0, 'name': SearchDatastore_Task, 'duration_secs': 0.024787} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.795694] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a668352-8081-4cb5-9d6d-5b172b667361 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.797839] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61028081-dff9-4338-aee3-e32121711d27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.804236] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 973.804236] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52603e74-b0c2-9915-e851-cc095cf7d3a5" [ 973.804236] env[69994]: _type = "Task" [ 973.804236] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.812035] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea710212-5073-49d7-b6e5-853cd699f02e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.827903] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52603e74-b0c2-9915-e851-cc095cf7d3a5, 'name': SearchDatastore_Task, 'duration_secs': 0.009903} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.828188] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.828461] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] c98308b3-2431-4f17-9022-bcd9f1e83a35/c98308b3-2431-4f17-9022-bcd9f1e83a35.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 973.828713] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2fc30367-c555-423f-8759-3332339f9e58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.842605] env[69994]: DEBUG nova.compute.manager [req-fb1738af-9974-4844-88d7-89cb35bca788 req-e5fdc4a7-09d2-49ef-b692-7b1399f374d8 service nova] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Detach interface failed, port_id=43ef7f11-6496-44e9-a438-979f2407ad8b, reason: Instance ee68a538-d803-4bd6-9117-b021b28da899 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 973.844174] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 973.844174] env[69994]: value = "task-2925850" [ 973.844174] env[69994]: _type = "Task" [ 973.844174] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.851657] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925850, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.939370] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925849, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.169095] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.922s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.172044] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.334s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.172044] env[69994]: DEBUG nova.objects.instance [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lazy-loading 'resources' on Instance uuid 234c2683-80f3-4f29-bcc9-9853338128bd {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 974.199560] env[69994]: INFO nova.scheduler.client.report [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Deleted allocations for instance 1e19dc4d-c3dd-41e7-819f-30d54cb1390e [ 974.212147] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e06399c9-7fc2-43aa-ad30-76d95c781e51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.222798] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e46a228-35bf-441f-a6c2-8e123116a6e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.258215] env[69994]: DEBUG nova.compute.manager [req-cd6fa513-8058-4f37-ae56-e4b9d5fb6060 req-70c5dc95-0526-40b0-908a-1ab6a2e4c2ae service nova] [instance: 
c06a2540-e77d-48c0-967f-94e2a53c4d8f] Detach interface failed, port_id=20b9c01f-3830-45bc-82b7-4b7014586c1c, reason: Instance c06a2540-e77d-48c0-967f-94e2a53c4d8f could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 974.268182] env[69994]: DEBUG nova.compute.manager [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 974.296896] env[69994]: DEBUG nova.virt.hardware [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 974.297228] env[69994]: DEBUG nova.virt.hardware [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 974.297537] env[69994]: DEBUG nova.virt.hardware [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 974.297628] env[69994]: DEBUG nova.virt.hardware [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 974.297748] env[69994]: DEBUG nova.virt.hardware [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 974.297901] env[69994]: DEBUG nova.virt.hardware [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 974.298157] env[69994]: DEBUG nova.virt.hardware [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 974.298379] env[69994]: DEBUG nova.virt.hardware [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 974.298562] env[69994]: DEBUG nova.virt.hardware [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 974.298736] env[69994]: DEBUG nova.virt.hardware [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 974.298913] env[69994]: DEBUG nova.virt.hardware [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 974.300308] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf395cb-49ea-4783-8f3e-79c46afefd26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.309251] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5504b9e4-595a-4661-8afb-06127973f77a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.353419] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925850, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494257} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.353419] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] c98308b3-2431-4f17-9022-bcd9f1e83a35/c98308b3-2431-4f17-9022-bcd9f1e83a35.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 974.353419] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 974.353714] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-566f8c2c-aa25-4423-a96e-2accb8365113 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.359451] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 974.359451] env[69994]: value = "task-2925851" [ 974.359451] env[69994]: _type = "Task" [ 974.359451] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.368118] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925851, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.437566] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925849, 'name': ReconfigVM_Task, 'duration_secs': 0.744546} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.437856] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 76dbf172-10b2-4439-9d2a-8226ba46062d/76dbf172-10b2-4439-9d2a-8226ba46062d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 974.438522] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd2832d8-0730-4865-936b-cb03832a648b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.444387] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 974.444387] env[69994]: value = "task-2925852" [ 974.444387] env[69994]: _type = "Task" [ 974.444387] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.452366] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925852, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.711658] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c95f0afe-a881-429e-bbc3-d312fc281b3d tempest-ServerMetadataTestJSON-861669106 tempest-ServerMetadataTestJSON-861669106-project-member] Lock "1e19dc4d-c3dd-41e7-819f-30d54cb1390e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.192s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.726372] env[69994]: DEBUG nova.compute.manager [req-4bcb8870-25f9-4f5d-be4a-54418212592d req-8b8b6834-5e77-4c3a-85e7-b87a4b6b3101 service nova] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Received event network-vif-plugged-63725932-f447-4abb-a32e-2b3b30e8e79f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 974.726635] env[69994]: DEBUG oslo_concurrency.lockutils [req-4bcb8870-25f9-4f5d-be4a-54418212592d req-8b8b6834-5e77-4c3a-85e7-b87a4b6b3101 service nova] Acquiring lock "d1875a97-9eba-47be-a76d-6088cb13412b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.726884] env[69994]: DEBUG oslo_concurrency.lockutils [req-4bcb8870-25f9-4f5d-be4a-54418212592d req-8b8b6834-5e77-4c3a-85e7-b87a4b6b3101 service nova] Lock "d1875a97-9eba-47be-a76d-6088cb13412b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.727033] env[69994]: DEBUG oslo_concurrency.lockutils [req-4bcb8870-25f9-4f5d-be4a-54418212592d req-8b8b6834-5e77-4c3a-85e7-b87a4b6b3101 service nova] Lock "d1875a97-9eba-47be-a76d-6088cb13412b-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.727768] env[69994]: DEBUG nova.compute.manager [req-4bcb8870-25f9-4f5d-be4a-54418212592d req-8b8b6834-5e77-4c3a-85e7-b87a4b6b3101 service nova] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] No waiting events found dispatching network-vif-plugged-63725932-f447-4abb-a32e-2b3b30e8e79f {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 974.727768] env[69994]: WARNING nova.compute.manager [req-4bcb8870-25f9-4f5d-be4a-54418212592d req-8b8b6834-5e77-4c3a-85e7-b87a4b6b3101 service nova] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Received unexpected event network-vif-plugged-63725932-f447-4abb-a32e-2b3b30e8e79f for instance with vm_state building and task_state spawning. [ 974.845652] env[69994]: DEBUG nova.network.neutron [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Successfully updated port: 63725932-f447-4abb-a32e-2b3b30e8e79f {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 974.872270] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925851, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067067} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.872413] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 974.873420] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c679c320-7a7a-4d45-b350-90559c91aa63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.899011] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] c98308b3-2431-4f17-9022-bcd9f1e83a35/c98308b3-2431-4f17-9022-bcd9f1e83a35.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 974.901936] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cce01c9a-7a21-4cca-af1f-11a59263d7a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.921778] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 974.921778] env[69994]: value = "task-2925853" [ 974.921778] env[69994]: _type = "Task" [ 974.921778] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.932341] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925853, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.955586] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925852, 'name': Rename_Task, 'duration_secs': 0.42437} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.955898] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 974.956190] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5e2fa48-0f74-433a-9ab3-98cc0e53eefc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.966671] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 974.966671] env[69994]: value = "task-2925854" [ 974.966671] env[69994]: _type = "Task" [ 974.966671] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.971525] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925854, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.995917] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbcc5e8-95b7-42c7-a77f-7bc7780e65c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.002889] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d708f4d4-fa1f-4dad-ab99-0d957c8a93bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.035142] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a96c55d-c8ae-47ee-99f3-869e6eca068a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.043365] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44899a9-06d1-4a47-89fa-ced9724006bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.056485] env[69994]: DEBUG nova.compute.provider_tree [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.349322] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "refresh_cache-d1875a97-9eba-47be-a76d-6088cb13412b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.349521] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "refresh_cache-d1875a97-9eba-47be-a76d-6088cb13412b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.349648] env[69994]: DEBUG nova.network.neutron [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 975.431820] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925853, 'name': ReconfigVM_Task, 'duration_secs': 0.349595} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.432450] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Reconfigured VM instance instance-0000004d to attach disk [datastore2] c98308b3-2431-4f17-9022-bcd9f1e83a35/c98308b3-2431-4f17-9022-bcd9f1e83a35.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 975.433083] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3cab6aa8-5d7f-4555-81d6-9b598352dd1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.439574] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 975.439574] env[69994]: value = "task-2925855" [ 975.439574] env[69994]: _type = "Task" [ 975.439574] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.447514] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925855, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.473348] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925854, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.560474] env[69994]: DEBUG nova.scheduler.client.report [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 975.901790] env[69994]: DEBUG nova.network.neutron [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 975.954900] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925855, 'name': Rename_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.973906] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925854, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.041646] env[69994]: DEBUG nova.network.neutron [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Updating instance_info_cache with network_info: [{"id": "63725932-f447-4abb-a32e-2b3b30e8e79f", "address": "fa:16:3e:8e:0c:b6", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63725932-f4", "ovs_interfaceid": "63725932-f447-4abb-a32e-2b3b30e8e79f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.066441] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.895s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.072273] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.502s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.072273] env[69994]: DEBUG nova.objects.instance [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Lazy-loading 'resources' on Instance uuid 15595947-b944-4c82-90ae-883ed951c909 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.098342] env[69994]: INFO nova.scheduler.client.report [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Deleted allocations for instance 234c2683-80f3-4f29-bcc9-9853338128bd [ 
976.452632] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925855, 'name': Rename_Task, 'duration_secs': 0.923804} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.452632] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 976.452632] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-219d79f8-e3ad-447f-99e8-18fe8edf7af5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.457382] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 976.457382] env[69994]: value = "task-2925856" [ 976.457382] env[69994]: _type = "Task" [ 976.457382] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.467359] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925856, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.475853] env[69994]: DEBUG oslo_vmware.api [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925854, 'name': PowerOnVM_Task, 'duration_secs': 1.397363} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.476227] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 976.476755] env[69994]: INFO nova.compute.manager [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Took 9.25 seconds to spawn the instance on the hypervisor. 
[ 976.476755] env[69994]: DEBUG nova.compute.manager [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 976.477689] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabb660c-93d9-480b-8380-b1906a09d7ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.544473] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "refresh_cache-d1875a97-9eba-47be-a76d-6088cb13412b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.544803] env[69994]: DEBUG nova.compute.manager [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Instance network_info: |[{"id": "63725932-f447-4abb-a32e-2b3b30e8e79f", "address": "fa:16:3e:8e:0c:b6", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63725932-f4", "ovs_interfaceid": "63725932-f447-4abb-a32e-2b3b30e8e79f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 976.545507] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:0c:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63725932-f447-4abb-a32e-2b3b30e8e79f', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 976.553693] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 976.554346] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 976.554346] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b22c5f4-33a3-4bae-868a-59b10afc5b07 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.583956] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 976.583956] env[69994]: value = "task-2925857" [ 976.583956] env[69994]: _type = "Task" [ 976.583956] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.594949] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925857, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.609999] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4887c8a8-adba-4c2f-9892-ed948b48d79e tempest-ServersWithSpecificFlavorTestJSON-697419574 tempest-ServersWithSpecificFlavorTestJSON-697419574-project-member] Lock "234c2683-80f3-4f29-bcc9-9853338128bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.488s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.769462] env[69994]: DEBUG nova.compute.manager [req-402eaddd-2a09-4fd2-92b8-874467341024 req-61499490-4bf1-448c-bf05-9cd73f2c4083 service nova] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Received event network-changed-63725932-f447-4abb-a32e-2b3b30e8e79f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 976.769692] env[69994]: DEBUG nova.compute.manager [req-402eaddd-2a09-4fd2-92b8-874467341024 req-61499490-4bf1-448c-bf05-9cd73f2c4083 service nova] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Refreshing instance network info cache due to event network-changed-63725932-f447-4abb-a32e-2b3b30e8e79f. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 976.769975] env[69994]: DEBUG oslo_concurrency.lockutils [req-402eaddd-2a09-4fd2-92b8-874467341024 req-61499490-4bf1-448c-bf05-9cd73f2c4083 service nova] Acquiring lock "refresh_cache-d1875a97-9eba-47be-a76d-6088cb13412b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.770191] env[69994]: DEBUG oslo_concurrency.lockutils [req-402eaddd-2a09-4fd2-92b8-874467341024 req-61499490-4bf1-448c-bf05-9cd73f2c4083 service nova] Acquired lock "refresh_cache-d1875a97-9eba-47be-a76d-6088cb13412b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.770392] env[69994]: DEBUG nova.network.neutron [req-402eaddd-2a09-4fd2-92b8-874467341024 req-61499490-4bf1-448c-bf05-9cd73f2c4083 service nova] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Refreshing network info cache for port 63725932-f447-4abb-a32e-2b3b30e8e79f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 976.895685] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3186d1f-7be4-47b2-b503-4a2489cbbd2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.905037] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c4fac1-3f2d-4990-aea6-c9dc2c1dd449 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.938833] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119bc7b1-91b9-4f6f-8ef7-a6b2f19c6375 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.947511] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da8a059-20c5-48c4-8ab8-b3a717acfc5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.961021] env[69994]: DEBUG nova.compute.provider_tree [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.970105] env[69994]: DEBUG oslo_vmware.api [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925856, 'name': PowerOnVM_Task, 'duration_secs': 0.495604} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.970383] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 976.970586] env[69994]: INFO nova.compute.manager [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Took 7.21 seconds to spawn the instance on the hypervisor. [ 976.970762] env[69994]: DEBUG nova.compute.manager [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 976.971548] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f676be-6294-459e-931a-f7c640819333 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.996838] env[69994]: INFO nova.compute.manager [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Took 24.82 seconds to build instance. [ 977.096385] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925857, 'name': CreateVM_Task, 'duration_secs': 0.311553} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.096456] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 977.097153] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.097388] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.097729] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 977.097988] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d701dadc-081e-4771-9626-c2687a19279e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.103139] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 977.103139] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d44ab2-6080-38a3-3881-d7c0338b5e58" [ 977.103139] env[69994]: _type = "Task" [ 977.103139] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.110854] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d44ab2-6080-38a3-3881-d7c0338b5e58, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.466805] env[69994]: DEBUG nova.scheduler.client.report [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 977.475071] env[69994]: DEBUG nova.network.neutron [req-402eaddd-2a09-4fd2-92b8-874467341024 req-61499490-4bf1-448c-bf05-9cd73f2c4083 service nova] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Updated VIF entry in instance network info cache for port 63725932-f447-4abb-a32e-2b3b30e8e79f. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 977.475071] env[69994]: DEBUG nova.network.neutron [req-402eaddd-2a09-4fd2-92b8-874467341024 req-61499490-4bf1-448c-bf05-9cd73f2c4083 service nova] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Updating instance_info_cache with network_info: [{"id": "63725932-f447-4abb-a32e-2b3b30e8e79f", "address": "fa:16:3e:8e:0c:b6", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63725932-f4", "ovs_interfaceid": "63725932-f447-4abb-a32e-2b3b30e8e79f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.487575] env[69994]: INFO nova.compute.manager [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Took 25.10 seconds to build instance. 
[ 977.500602] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4546f0b4-eff9-4da6-b48b-3338b26e506a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "76dbf172-10b2-4439-9d2a-8226ba46062d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.336s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.614649] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d44ab2-6080-38a3-3881-d7c0338b5e58, 'name': SearchDatastore_Task, 'duration_secs': 0.016593} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.615022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.615283] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 977.615526] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.615674] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.615851] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 977.616135] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-253fef87-747c-4b6f-b1da-48795dc16184 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.631022] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Created directory with path [datastore2] devstack-image-cache_base 
{{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 977.631022] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 977.631022] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5315562e-1292-4252-a144-247e1c7e087c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.636369] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 977.636369] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d4ef2a-a6a3-34bf-36e6-451b5dadc3e6" [ 977.636369] env[69994]: _type = "Task" [ 977.636369] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.643975] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d4ef2a-a6a3-34bf-36e6-451b5dadc3e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.977534] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.906s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.978923] env[69994]: DEBUG oslo_concurrency.lockutils [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.376s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.978923] env[69994]: DEBUG nova.objects.instance [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lazy-loading 'resources' on Instance uuid 83cef95b-99a5-4e6e-8258-79b380b595b3 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.980559] env[69994]: DEBUG oslo_concurrency.lockutils [req-402eaddd-2a09-4fd2-92b8-874467341024 req-61499490-4bf1-448c-bf05-9cd73f2c4083 service nova] Releasing lock "refresh_cache-d1875a97-9eba-47be-a76d-6088cb13412b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.989263] env[69994]: DEBUG oslo_concurrency.lockutils [None req-77639f00-bd06-48dc-ac5c-b699b606d629 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "c98308b3-2431-4f17-9022-bcd9f1e83a35" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.608s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.004960] env[69994]: INFO nova.scheduler.client.report [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Deleted allocations for instance 15595947-b944-4c82-90ae-883ed951c909 [ 978.150803] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d4ef2a-a6a3-34bf-36e6-451b5dadc3e6, 'name': SearchDatastore_Task, 'duration_secs': 0.0297} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.152083] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1d7df2b-8a4e-4fbe-a21f-63dbccfe3548 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.159799] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 978.159799] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ea58f2-fb72-c312-3fd8-4e13008e55a0" [ 978.159799] env[69994]: _type = "Task" [ 978.159799] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.172380] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ea58f2-fb72-c312-3fd8-4e13008e55a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.513099] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adca1d45-5683-438c-8da6-946df6060b77 tempest-ServerShowV257Test-772691005 tempest-ServerShowV257Test-772691005-project-member] Lock "15595947-b944-4c82-90ae-883ed951c909" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.273s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.675976] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ea58f2-fb72-c312-3fd8-4e13008e55a0, 'name': SearchDatastore_Task, 'duration_secs': 0.031091} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.676239] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.676478] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] d1875a97-9eba-47be-a76d-6088cb13412b/d1875a97-9eba-47be-a76d-6088cb13412b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 978.676781] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c85fd922-6f73-4be9-8912-28a86bdaaee8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.688506] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 978.688506] env[69994]: value = "task-2925858" [ 978.688506] env[69994]: _type = "Task" [ 978.688506] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.698553] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925858, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.722131] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1c5813-8dee-4113-ad33-dff9091ac6f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.729625] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f01fd0a-d085-4db8-8361-e30391fef316 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.771768] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903052aa-cbb9-47f9-a9f5-a7640e74b931 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.779758] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73f7bca-5c4e-4741-b0ef-d2dc05ce12eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.794040] env[69994]: DEBUG nova.compute.provider_tree [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.196389] env[69994]: DEBUG nova.compute.manager [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 979.197226] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26f85ee-bd57-4487-8bf1-cd6a9e0af168 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.207309] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925858, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.299032] env[69994]: DEBUG nova.scheduler.client.report [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 979.699771] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925858, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626542} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.700106] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] d1875a97-9eba-47be-a76d-6088cb13412b/d1875a97-9eba-47be-a76d-6088cb13412b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 979.700398] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 979.700644] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3fb82f80-04ec-4e59-8f9f-5d1c789bb961 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.709356] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 979.709356] env[69994]: value = "task-2925859" [ 979.709356] env[69994]: _type = "Task" [ 979.709356] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.714846] env[69994]: INFO nova.compute.manager [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] instance snapshotting [ 979.719428] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925859, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.720533] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b4b340-14dd-4bc8-a339-bdc0766cd2f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.738978] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd9e49f-b293-4470-a88c-a51e96d3e750 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.809285] env[69994]: DEBUG oslo_concurrency.lockutils [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.828s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.809979] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.498s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.810865] env[69994]: DEBUG nova.objects.instance [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lazy-loading 'resources' on Instance uuid f2ae08e9-fbf3-49ab-8290-75f8a53d6030 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.833440] env[69994]: INFO nova.scheduler.client.report [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted allocations for instance 83cef95b-99a5-4e6e-8258-79b380b595b3 [ 980.221901] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925859, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068622} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.222512] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 980.223431] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4518dc26-625b-4c4b-866e-b72acd7f684f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.245669] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] d1875a97-9eba-47be-a76d-6088cb13412b/d1875a97-9eba-47be-a76d-6088cb13412b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 980.246349] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b25cccb2-5458-421c-8690-b22dbfd8aaef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.262674] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 980.262934] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0a00de95-ea36-48ab-b220-636682301273 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.270970] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 980.270970] env[69994]: value = "task-2925861" [ 980.270970] env[69994]: _type = "Task" [ 980.270970] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.272233] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 980.272233] env[69994]: value = "task-2925860" [ 980.272233] env[69994]: _type = "Task" [ 980.272233] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.285536] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925861, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.288578] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925860, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.340257] env[69994]: DEBUG oslo_concurrency.lockutils [None req-949ac50e-c943-483f-a1fa-2169c6fdeaad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "83cef95b-99a5-4e6e-8258-79b380b595b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.419s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.573433] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb258c9f-f77a-436a-a6ec-0ed3a2226920 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.581076] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725b013a-1da2-4136-b187-51ee0a8eb43a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.616395] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45806ee-36ff-4fcc-a17f-7d5693206b03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.624360] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89758a86-ff3c-4f75-8918-81ddd4ec8db0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.639972] env[69994]: DEBUG nova.compute.provider_tree [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.784904] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925860, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.788529] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925861, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.143533] env[69994]: DEBUG nova.scheduler.client.report [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 981.284397] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925861, 'name': ReconfigVM_Task, 'duration_secs': 0.737961} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.287587] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Reconfigured VM instance instance-0000004e to attach disk [datastore2] d1875a97-9eba-47be-a76d-6088cb13412b/d1875a97-9eba-47be-a76d-6088cb13412b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 981.288156] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925860, 'name': CreateSnapshot_Task, 'duration_secs': 1.003241} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.288364] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1be59cbd-a5d6-46e9-b368-4f51b12e6de2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.289928] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 981.290680] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a07f276-6641-439f-8365-873b9e92f8da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.301931] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 981.301931] env[69994]: value = "task-2925862" [ 981.301931] env[69994]: _type = "Task" [ 981.301931] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.309541] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925862, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.649599] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.839s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.652030] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.658s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.652328] env[69994]: DEBUG nova.objects.instance [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lazy-loading 'resources' on Instance uuid 565066c4-2f33-44c6-8e82-4c6d729cd0b7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.674529] env[69994]: INFO nova.scheduler.client.report [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted allocations for instance f2ae08e9-fbf3-49ab-8290-75f8a53d6030 [ 981.809716] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 981.810099] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-80a165e3-2e65-41f8-ac3f-8729608e4afa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.821938] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925862, 'name': Rename_Task, 'duration_secs': 0.250919} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.822220] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 981.822462] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a4a7594-8dee-421c-b651-8919c7ab8b3c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.825037] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 981.825037] env[69994]: value = "task-2925863" [ 981.825037] env[69994]: _type = "Task" [ 981.825037] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.829537] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 981.829537] env[69994]: value = "task-2925864" [ 981.829537] env[69994]: _type = "Task" [ 981.829537] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.835413] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925863, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.842085] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925864, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.184927] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7fb7cbd-b556-4859-8e37-90da80b6fa6a tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f2ae08e9-fbf3-49ab-8290-75f8a53d6030" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.387s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.346750] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925863, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.351101] env[69994]: DEBUG oslo_vmware.api [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925864, 'name': PowerOnVM_Task, 'duration_secs': 0.502034} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.354260] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 982.354582] env[69994]: INFO nova.compute.manager [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Took 8.09 seconds to spawn the instance on the hypervisor. [ 982.354831] env[69994]: DEBUG nova.compute.manager [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 982.355861] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea17f8ee-ebb5-4f25-8fa5-bdc6a7d7aaae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.464315] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e604436-03a4-4169-81df-92601730e205 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.474438] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d715b1de-8bc2-4ede-b3ae-4e091d153b51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.509866] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d51165e-5ecd-4050-90cc-19b2c80a13c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.519399] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9aa3c78-00c6-455e-9f4b-69ffed5e2756 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.533727] env[69994]: DEBUG nova.compute.provider_tree [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.836303] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925863, 'name': CloneVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.880875] env[69994]: INFO nova.compute.manager [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Took 22.25 seconds to build instance. 
[ 983.040374] env[69994]: DEBUG nova.scheduler.client.report [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 983.337169] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925863, 'name': CloneVM_Task, 'duration_secs': 1.34051} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.337577] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Created linked-clone VM from snapshot [ 983.338414] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6947ed-3692-4064-b021-0cb21754c3ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.346688] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Uploading image b3dc2441-8303-4714-98ba-7ee116c030ab {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 983.375271] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 983.375271] env[69994]: value = "vm-587557" [ 983.375271] env[69994]: _type = "VirtualMachine" [ 983.375271] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 983.377024] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e2c4f8eb-7391-4731-b865-e58620d9c9db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.384950] env[69994]: DEBUG oslo_concurrency.lockutils [None req-139a64e4-a44a-47b3-afa1-70717ec1626c tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "d1875a97-9eba-47be-a76d-6088cb13412b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.766s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.385604] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lease: (returnval){ [ 983.385604] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dadb9a-b4c6-9268-c162-978e13a8895c" [ 983.385604] env[69994]: _type = "HttpNfcLease" [ 983.385604] env[69994]: } obtained for exporting VM: (result){ [ 983.385604] env[69994]: value = "vm-587557" [ 983.385604] env[69994]: _type = "VirtualMachine" [ 983.385604] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 983.385840] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the lease: (returnval){ [ 983.385840] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dadb9a-b4c6-9268-c162-978e13a8895c" [ 983.385840] env[69994]: _type = "HttpNfcLease" [ 983.385840] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 983.392571] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 983.392571] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dadb9a-b4c6-9268-c162-978e13a8895c" [ 983.392571] env[69994]: _type = "HttpNfcLease" [ 983.392571] env[69994]: } is initializing. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 983.548797] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.898s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.552052] env[69994]: DEBUG oslo_concurrency.lockutils [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.647s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.552052] env[69994]: DEBUG nova.objects.instance [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Lazy-loading 'resources' on Instance uuid f1f0d79f-dc67-4cf9-816c-c451f20d65ca {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.570443] env[69994]: INFO nova.scheduler.client.report [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Deleted allocations for instance 565066c4-2f33-44c6-8e82-4c6d729cd0b7 [ 983.897022] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 983.897022] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dadb9a-b4c6-9268-c162-978e13a8895c" [ 983.897022] env[69994]: _type = "HttpNfcLease" [ 983.897022] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 983.897022] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 983.897022] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dadb9a-b4c6-9268-c162-978e13a8895c" [ 983.897022] env[69994]: _type = "HttpNfcLease" [ 983.897022] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 983.897022] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d196f53-f60f-46a2-b87d-49ed1b84c9fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.908939] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521942ba-7287-5fad-06c7-c4855bd216c4/disk-0.vmdk from lease info. 
{{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 983.909767] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521942ba-7287-5fad-06c7-c4855bd216c4/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 983.985028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "5e142f6e-920a-4f11-abff-13eb5c168660" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.985028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "5e142f6e-920a-4f11-abff-13eb5c168660" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.038412] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f28a8f94-377e-4703-8130-40fb06a7c2b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.083075] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c49a4dd7-c3fa-4803-af40-e9bbe82913ae tempest-SecurityGroupsTestJSON-1289107900 tempest-SecurityGroupsTestJSON-1289107900-project-member] Lock "565066c4-2f33-44c6-8e82-4c6d729cd0b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.579s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.346653] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e39d6f-65c7-4489-9885-f8c23fe7340a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.357872] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2d0d3b-dcb1-4787-b10a-dbd91d413fb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.393332] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae9912b-4242-438d-83ef-5f6015390491 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.401173] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16024fd-3386-4595-923c-3741bb8681c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.416365] env[69994]: DEBUG nova.compute.provider_tree [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Updating inventory in ProviderTree for 
provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 984.488194] env[69994]: DEBUG nova.compute.manager [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 984.743011] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Acquiring lock "29326ab7-2b4b-42af-a90c-e86510bcd443" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.744592] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Lock "29326ab7-2b4b-42af-a90c-e86510bcd443" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.949592] env[69994]: ERROR nova.scheduler.client.report [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] [req-8eb0db9c-61b8-48ae-90af-ba6c6ca39fb4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8eb0db9c-61b8-48ae-90af-ba6c6ca39fb4"}]} [ 984.969388] env[69994]: DEBUG nova.scheduler.client.report [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 984.986018] env[69994]: DEBUG nova.scheduler.client.report [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 984.986520] env[69994]: DEBUG nova.compute.provider_tree [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 985.007315] env[69994]: DEBUG nova.scheduler.client.report [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 985.019062] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.032246] env[69994]: DEBUG nova.scheduler.client.report [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 985.101970] 
env[69994]: INFO nova.compute.manager [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Rebuilding instance [ 985.161450] env[69994]: DEBUG nova.compute.manager [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 985.162232] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4432d038-e651-4021-9918-c02774f4ae7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.221276] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.221839] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.246559] env[69994]: DEBUG nova.compute.manager [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 985.374351] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b6abd9-2f24-4107-b031-2aa53ac1ef82 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.383835] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2c193e-3fc9-4918-9c0d-0fad8ce5302f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.421922] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1fd448-015d-4cb9-952a-b2f55b80bbbe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.431565] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaef6dba-df66-4215-9eec-efaa9f07bc74 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.450155] env[69994]: DEBUG nova.compute.provider_tree [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 985.734417] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.737076] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.737076] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.737076] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.737076] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.737076] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.737076] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 985.737076] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.784222] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.849787] env[69994]: INFO nova.compute.manager [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Rebuilding instance [ 985.902953] env[69994]: DEBUG nova.compute.manager [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 985.903877] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e9949f-cf40-493e-8e5f-e7b5243f0e34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.985583] env[69994]: DEBUG nova.scheduler.client.report [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 108 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 985.985773] env[69994]: DEBUG nova.compute.provider_tree [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 108 to 109 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 985.985968] env[69994]: DEBUG nova.compute.provider_tree [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 986.185236] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 986.185705] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-12d4b91d-5f3b-46be-9860-0c1664745c69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.193143] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 986.193143] env[69994]: value = "task-2925866" [ 986.193143] env[69994]: _type = "Task" [ 986.193143] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.203798] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925866, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.239540] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.492809] env[69994]: DEBUG oslo_concurrency.lockutils [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.941s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.497113] env[69994]: DEBUG oslo_concurrency.lockutils [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.843s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.497361] env[69994]: DEBUG nova.objects.instance [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lazy-loading 'resources' on Instance uuid e03bc64f-70e9-4097-a1e1-ebf8f86508ed {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 986.518909] env[69994]: INFO nova.scheduler.client.report [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Deleted allocations for instance f1f0d79f-dc67-4cf9-816c-c451f20d65ca [ 986.704649] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 
tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925866, 'name': PowerOffVM_Task, 'duration_secs': 0.376387} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.704951] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 986.705218] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 986.706033] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457c6b43-c2f9-4ef0-b1c0-8a0f5c549af8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.712885] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 986.713161] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd28e734-f817-477d-ab4d-347e62824d86 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.777741] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 986.777973] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 986.778241] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleting the datastore file [datastore2] d1875a97-9eba-47be-a76d-6088cb13412b {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 986.778436] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46f2c727-a963-4577-aea5-ef352abba2a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.785065] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 986.785065] env[69994]: value = "task-2925868" [ 986.785065] 
env[69994]: _type = "Task" [ 986.785065] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.793564] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925868, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.920201] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 986.920607] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a429a88-bc8b-4ed3-9e8a-c3ac53d68abe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.928622] env[69994]: DEBUG oslo_vmware.api [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Waiting for the task: (returnval){ [ 986.928622] env[69994]: value = "task-2925869" [ 986.928622] env[69994]: _type = "Task" [ 986.928622] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.938083] env[69994]: DEBUG oslo_vmware.api [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925869, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.026856] env[69994]: DEBUG oslo_concurrency.lockutils [None req-807da415-e9cd-4982-bd07-0befb1efa038 tempest-ServersTestBootFromVolume-1491535956 tempest-ServersTestBootFromVolume-1491535956-project-member] Lock "f1f0d79f-dc67-4cf9-816c-c451f20d65ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.819s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.253768] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938cc2a8-25c7-4383-89b0-4c7ba55cc53c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.261568] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d751ca01-2cd3-456a-8ebc-1ecf6e37efb6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.294798] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f65b8b5-ebc7-4e8c-b924-dea759c5ed64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.302333] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925868, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158338} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.304402] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 987.304598] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 987.304773] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 987.309361] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ebad946-493a-40f8-bfea-0e573090697b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.323806] env[69994]: DEBUG nova.compute.provider_tree [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 987.438523] env[69994]: DEBUG oslo_vmware.api [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925869, 'name': PowerOffVM_Task, 'duration_secs': 0.194358} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.438649] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 987.439412] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 987.439683] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0aea5697-a370-4b41-ac38-d49e28cfd4f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.449515] env[69994]: DEBUG oslo_vmware.api [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Waiting for the task: (returnval){ [ 987.449515] env[69994]: value = "task-2925870" [ 987.449515] env[69994]: _type = "Task" [ 987.449515] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.458946] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 987.459263] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 987.459550] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587430', 'volume_id': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'name': 'volume-cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4cc99b2f-2d75-4a98-ac02-6b609e0c31d6', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'serial': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 987.460381] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afcdd909-b157-4365-9571-1fadf6e04004 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.480784] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e44124-6595-42cb-a9b9-5f4d789fb4c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.487999] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11edad45-37f1-41c9-b229-0d84d87762c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.506720] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336a7bb9-a94e-4425-9fff-f4dd99feac69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.522461] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] The volume has not been displaced from its original location: [datastore1] volume-cd17a3cc-4285-4a72-b443-b0f9d28d0473/volume-cd17a3cc-4285-4a72-b443-b0f9d28d0473.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 987.529025] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 987.529025] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8aa0e0bc-bd5e-4965-a9df-a7d6c7234aec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.546788] env[69994]: DEBUG oslo_vmware.api [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Waiting for the task: (returnval){ [ 987.546788] env[69994]: value = "task-2925871" [ 987.546788] env[69994]: _type = "Task" [ 987.546788] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.555172] env[69994]: DEBUG oslo_vmware.api [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925871, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.861658] env[69994]: DEBUG nova.scheduler.client.report [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 109 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 987.861933] env[69994]: DEBUG nova.compute.provider_tree [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 109 to 110 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 987.862134] env[69994]: DEBUG nova.compute.provider_tree [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
988.059303] env[69994]: DEBUG oslo_vmware.api [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925871, 'name': ReconfigVM_Task, 'duration_secs': 0.14875} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.059721] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 988.066018] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c011c858-b8ef-4410-a31b-477cfa02321b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.082591] env[69994]: DEBUG oslo_vmware.api [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Waiting for the task: (returnval){ [ 988.082591] env[69994]: value = "task-2925872" [ 988.082591] env[69994]: _type = "Task" [ 988.082591] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.094964] env[69994]: DEBUG oslo_vmware.api [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925872, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.339709] env[69994]: DEBUG nova.virt.hardware [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 988.339709] env[69994]: DEBUG nova.virt.hardware [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 988.339709] env[69994]: DEBUG nova.virt.hardware [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 988.339709] env[69994]: DEBUG nova.virt.hardware [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 988.339709] env[69994]: DEBUG nova.virt.hardware [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 988.339709] env[69994]: DEBUG nova.virt.hardware [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 988.339709] env[69994]: DEBUG nova.virt.hardware [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 988.340047] env[69994]: DEBUG nova.virt.hardware [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 988.340547] env[69994]: 
DEBUG nova.virt.hardware [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 988.340547] env[69994]: DEBUG nova.virt.hardware [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 988.340656] env[69994]: DEBUG nova.virt.hardware [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 988.341510] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35c7017-4dfa-4358-a44e-0f553ae295f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.352418] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1dbfaf9-3454-498a-975b-c142ced9d69c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.366190] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:0c:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63725932-f447-4abb-a32e-2b3b30e8e79f', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 988.373682] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 988.374440] env[69994]: DEBUG oslo_concurrency.lockutils [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.878s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.376419] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 988.377171] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.199s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.377398] env[69994]: DEBUG nova.objects.instance [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lazy-loading 'resources' on Instance uuid ee68a538-d803-4bd6-9117-b021b28da899 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.378508] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e76e354-cdb4-4dca-81ad-48a2da397017 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.396020] env[69994]: INFO nova.scheduler.client.report [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleted allocations for instance e03bc64f-70e9-4097-a1e1-ebf8f86508ed [ 988.405886] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 988.405886] env[69994]: value = "task-2925873" [ 988.405886] env[69994]: _type = "Task" [ 988.405886] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.415045] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925873, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.596029] env[69994]: DEBUG oslo_vmware.api [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925872, 'name': ReconfigVM_Task, 'duration_secs': 0.172043} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.596345] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587430', 'volume_id': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'name': 'volume-cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4cc99b2f-2d75-4a98-ac02-6b609e0c31d6', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473', 'serial': 'cd17a3cc-4285-4a72-b443-b0f9d28d0473'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 988.596718] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 988.600403] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52dd96b1-fcea-4244-ad68-0c1be5153ca3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.606653] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 988.607210] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f31d272d-c750-4091-b833-76f012acd2dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.667637] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 988.667637] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 988.667734] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Deleting the datastore file [datastore1] 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.668048] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed5cf86d-870d-4141-bf36-834841bd40ea {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.676835] env[69994]: DEBUG oslo_vmware.api [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Waiting for the task: (returnval){ [ 988.676835] env[69994]: value = "task-2925875" [ 988.676835] env[69994]: _type = "Task" [ 988.676835] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.690420] env[69994]: DEBUG oslo_vmware.api [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925875, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.904070] env[69994]: DEBUG oslo_concurrency.lockutils [None req-71ef2692-bc9e-4cd3-bee5-a7ff6efd9cf5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "e03bc64f-70e9-4097-a1e1-ebf8f86508ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.682s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.917338] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925873, 'name': CreateVM_Task, 'duration_secs': 0.368472} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.917338] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 988.918047] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.918533] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.918868] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 988.918995] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8fdf2ac-ebd4-4cc3-bdd9-d4e22f2719bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.927395] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 
tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 988.927395] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527d14de-36a2-7fe1-0bf8-07974a02a6f2" [ 988.927395] env[69994]: _type = "Task" [ 988.927395] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.937627] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527d14de-36a2-7fe1-0bf8-07974a02a6f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.143099] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f830b63d-3d02-4dbc-a29f-4397d9f685c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.151671] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be292067-2d9e-4239-bea4-fd60f1e1b2fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.188341] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55faf104-3e70-4c10-b761-f80045f94a9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.201050] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17014cf-940c-4c47-a8bf-191c4f0d9231 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.205191] env[69994]: DEBUG oslo_vmware.api [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Task: {'id': task-2925875, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076291} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.205613] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.205677] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 989.205830] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 989.218842] env[69994]: DEBUG nova.compute.provider_tree [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.281621] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 989.282104] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-387a03bf-c982-437f-b938-6d7512b62e1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.291703] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a00d8a1-9ca6-47ae-95ff-6ae0dd88f042 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.327755] env[69994]: ERROR nova.compute.manager [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Failed to detach volume cd17a3cc-4285-4a72-b443-b0f9d28d0473 from /dev/sda: nova.exception.InstanceNotFound: Instance 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6 could not be found. 
[ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Traceback (most recent call last): [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] self.driver.rebuild(**kwargs) [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] raise NotImplementedError() [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] NotImplementedError [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] During handling of the above exception, another exception occurred: [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Traceback (most recent call last): [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] self.driver.detach_volume(context, old_connection_info, [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] return self._volumeops.detach_volume(connection_info, instance) [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] self._detach_volume_vmdk(connection_info, instance) [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] stable_ref.fetch_moref(session) [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] raise exception.InstanceNotFound(instance_id=self._uuid) [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] nova.exception.InstanceNotFound: 
Instance 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6 could not be found. [ 989.327755] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] [ 989.437728] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527d14de-36a2-7fe1-0bf8-07974a02a6f2, 'name': SearchDatastore_Task, 'duration_secs': 0.01255} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.440901] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.441196] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 989.441448] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.441598] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.441787] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 989.442342] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf3a33b4-0026-45c9-9643-b8d414ea3174 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.451198] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 989.451396] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Folder [datastore2] devstack-image-cache_base 
created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 989.452203] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eabfea45-72b3-4833-b10a-6bbad49ba844 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.457776] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 989.457776] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52534418-a131-b762-a311-43c81f78a8df" [ 989.457776] env[69994]: _type = "Task" [ 989.457776] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.466780] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52534418-a131-b762-a311-43c81f78a8df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.477191] env[69994]: DEBUG nova.compute.utils [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Build of instance 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6 aborted: Failed to rebuild volume backed instance. {{(pid=69994) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 989.480854] env[69994]: ERROR nova.compute.manager [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6 aborted: Failed to rebuild volume backed instance. 
[ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Traceback (most recent call last): [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] self.driver.rebuild(**kwargs) [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] raise NotImplementedError() [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] NotImplementedError [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] During handling of the above exception, another exception occurred: [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Traceback (most recent call last): [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] self._detach_root_volume(context, instance, root_bdm) [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] with excutils.save_and_reraise_exception(): [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] self.force_reraise() [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] raise self.value [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] self.driver.detach_volume(context, old_connection_info, [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] return self._volumeops.detach_volume(connection_info, instance) [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] self._detach_volume_vmdk(connection_info, instance) [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] stable_ref.fetch_moref(session) [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] raise exception.InstanceNotFound(instance_id=self._uuid) [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] nova.exception.InstanceNotFound: Instance 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6 could not be found. [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] During handling of the above exception, another exception occurred: [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Traceback (most recent call last): [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/compute/manager.py", line 11471, in _error_out_instance_on_exception [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] yield [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 989.480854] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] self._do_rebuild_instance_with_claim( [ 989.482706] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 989.482706] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] self._do_rebuild_instance( [ 989.482706] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 989.482706] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] self._rebuild_default_impl(**kwargs) [ 989.482706] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 989.482706] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] self._rebuild_volume_backed_instance( [ 
989.482706] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 989.482706] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] raise exception.BuildAbortException( [ 989.482706] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] nova.exception.BuildAbortException: Build of instance 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6 aborted: Failed to rebuild volume backed instance. [ 989.482706] env[69994]: ERROR nova.compute.manager [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] [ 989.688637] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "f946992b-faf2-4580-adcd-806d3b8fd104" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.688987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "f946992b-faf2-4580-adcd-806d3b8fd104" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.724054] env[69994]: DEBUG nova.scheduler.client.report [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 989.969554] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52534418-a131-b762-a311-43c81f78a8df, 'name': SearchDatastore_Task, 'duration_secs': 0.010749} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.970466] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03d10384-ae98-42e0-a741-9df3e9bd87c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.976240] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 989.976240] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5282b6f5-7f63-752e-e592-d43bd89f76cb" [ 989.976240] env[69994]: _type = "Task" [ 989.976240] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.986684] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5282b6f5-7f63-752e-e592-d43bd89f76cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.192192] env[69994]: DEBUG nova.compute.manager [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 990.230210] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.853s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.233525] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.928s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.233849] env[69994]: DEBUG nova.objects.instance [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Lazy-loading 'resources' on Instance uuid c06a2540-e77d-48c0-967f-94e2a53c4d8f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 990.255495] env[69994]: INFO nova.scheduler.client.report [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Deleted allocations for instance ee68a538-d803-4bd6-9117-b021b28da899 [ 990.487655] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5282b6f5-7f63-752e-e592-d43bd89f76cb, 'name': SearchDatastore_Task, 'duration_secs': 0.011207} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.487996] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.488294] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] d1875a97-9eba-47be-a76d-6088cb13412b/d1875a97-9eba-47be-a76d-6088cb13412b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 990.489866] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa47266a-3fd7-4937-98f8-6577a8f5f97c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.498315] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 990.498315] env[69994]: value = "task-2925876" [ 990.498315] env[69994]: _type = "Task" [ 990.498315] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.507521] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925876, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.718923] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.766331] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cea8c9fb-1008-4262-a049-38e2e2e55eac tempest-ImagesTestJSON-816155856 tempest-ImagesTestJSON-816155856-project-member] Lock "ee68a538-d803-4bd6-9117-b021b28da899" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.130s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.999020] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a97005-7986-4d38-94f9-592400f60206 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.012518] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925876, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.016726] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4c99fa-3547-4bf4-a48d-9edfe25732d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.057834] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ea8944-3401-4631-8dc4-f6ca9911b864 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.066864] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61f1b31-1abc-4dde-a3fd-0275a77ed371 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.082702] env[69994]: DEBUG nova.compute.provider_tree [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.503702] env[69994]: DEBUG oslo_concurrency.lockutils [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.519985] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925876, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55409} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.522329] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] d1875a97-9eba-47be-a76d-6088cb13412b/d1875a97-9eba-47be-a76d-6088cb13412b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 991.522519] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 991.523193] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d64b0eae-1d56-491b-95ab-a089a19c3d68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.532864] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521942ba-7287-5fad-06c7-c4855bd216c4/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 991.532864] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 991.532864] env[69994]: value = "task-2925877" [ 991.532864] env[69994]: _type = "Task" [ 991.532864] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.533636] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd10508-2c23-46ad-86d1-8f001c86a264 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.543853] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521942ba-7287-5fad-06c7-c4855bd216c4/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 991.543998] env[69994]: ERROR oslo_vmware.rw_handles [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521942ba-7287-5fad-06c7-c4855bd216c4/disk-0.vmdk due to incomplete transfer. 
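The records above (and throughout this section) follow one recurring pattern: a vSphere method such as CopyVirtualDisk_Task, ExtendVirtualDisk_Task or RemoveSnapshot_Task is invoked, vCenter returns a Task object (e.g. "task-2925876"), and the caller blocks in a poller that logs "progress is N%" until the task finishes, at which point "completed successfully" is logged together with duration_secs. The snippet below is a minimal, self-contained sketch of that poll-until-terminal-state loop; the names FakeVSphereTask and its poll() method are hypothetical stand-ins used purely for illustration and are not oslo.vmware's actual implementation.

```python
import time


class FakeVSphereTask:
    """Hypothetical stand-in for a vSphere Task managed object.

    The real driver receives a moref such as "task-2925876" and reads
    TaskInfo.state / TaskInfo.progress from vCenter; here poll() just
    simulates a task that advances on every call.
    """

    def __init__(self, name, steps=3):
        self.name = name
        self._progress = 0
        self._step = 100 // steps + 1

    def poll(self):
        # Advance the simulated task and report (state, progress).
        self._progress = min(100, self._progress + self._step)
        state = "success" if self._progress >= 100 else "running"
        return state, self._progress


def wait_for_task(task, interval=0.5):
    """Block until the task reaches a terminal state.

    Logs the same shape of messages seen in the records above:
    "progress is N%" while running, then the elapsed duration on success.
    """
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        if state == "running":
            print(f"Task {task.name} progress is {progress}%.")
            time.sleep(interval)
            continue
        duration = time.monotonic() - start
        if state == "success":
            print(f"Task {task.name} completed successfully "
                  f"(duration_secs={duration:.6f}).")
            return
        raise RuntimeError(f"Task {task.name} ended in state {state}")


if __name__ == "__main__":
    wait_for_task(FakeVSphereTask("CopyVirtualDisk_Task"))
```

In the log itself this loop is provided by oslo.vmware rather than by Nova: the {{...}} location tags point at wait_for_task and _poll_task in oslo_vmware/api.py, which is where the "Waiting for the task ... to complete", "progress is N%" and duration_secs fields above originate.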
[ 991.547049] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-dbe3aff2-e240-4931-a946-9b2a88dabf20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.548619] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925877, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.555882] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521942ba-7287-5fad-06c7-c4855bd216c4/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 991.555882] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Uploaded image b3dc2441-8303-4714-98ba-7ee116c030ab to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 991.557700] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 991.558905] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8393c69f-f7e7-4425-824d-41fcd0eec97a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.565136] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 991.565136] env[69994]: value = "task-2925878" [ 991.565136] env[69994]: _type = "Task" [ 991.565136] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.576131] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925878, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.585617] env[69994]: DEBUG nova.scheduler.client.report [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 992.002628] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Acquiring lock "4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.003021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Lock "4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.003221] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Acquiring lock "4cc99b2f-2d75-4a98-ac02-6b609e0c31d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.003406] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Lock "4cc99b2f-2d75-4a98-ac02-6b609e0c31d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.003574] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Lock "4cc99b2f-2d75-4a98-ac02-6b609e0c31d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.006435] env[69994]: INFO nova.compute.manager [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Terminating instance [ 992.050927] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 
tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925877, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066418} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.050927] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 992.052094] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455c197d-d42a-4557-9a4b-f49fb1184229 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.077193] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] d1875a97-9eba-47be-a76d-6088cb13412b/d1875a97-9eba-47be-a76d-6088cb13412b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 992.082811] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab055344-2f4e-43e5-be28-dc55b64c194d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.097367] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.864s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.100646] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.081s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.101671] env[69994]: INFO nova.compute.claims [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 992.108781] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925878, 'name': Destroy_Task, 'duration_secs': 0.421185} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.110184] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Destroyed the VM [ 992.110280] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 992.110601] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 992.110601] env[69994]: value = "task-2925879" [ 992.110601] env[69994]: _type = "Task" [ 992.110601] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.110806] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-951c5c96-b149-4039-a693-ec040b0385a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.121880] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925879, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.122964] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 992.122964] env[69994]: value = "task-2925880" [ 992.122964] env[69994]: _type = "Task" [ 992.122964] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.126610] env[69994]: INFO nova.scheduler.client.report [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Deleted allocations for instance c06a2540-e77d-48c0-967f-94e2a53c4d8f [ 992.138454] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925880, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.513015] env[69994]: DEBUG nova.compute.manager [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 992.513420] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec8c4daf-33f4-43c4-9907-838b88295c9d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.524347] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7029939-bf9b-4b88-8312-b61abf223d63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.558763] env[69994]: WARNING nova.virt.vmwareapi.driver [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6 could not be found. [ 992.558996] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 992.559361] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cfed4d11-ceed-4d05-80e9-227b7d14271a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.567972] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2efe67fb-7422-4ae1-95ef-b2ddabd130ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.601328] env[69994]: WARNING nova.virt.vmwareapi.vmops [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6 could not be found. [ 992.601502] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 992.601676] env[69994]: INFO nova.compute.manager [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Took 0.09 seconds to destroy the instance on the hypervisor. [ 992.601986] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 992.602257] env[69994]: DEBUG nova.compute.manager [-] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 992.602355] env[69994]: DEBUG nova.network.neutron [-] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 992.621674] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925879, 'name': ReconfigVM_Task, 'duration_secs': 0.332278} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.622624] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Reconfigured VM instance instance-0000004e to attach disk [datastore2] d1875a97-9eba-47be-a76d-6088cb13412b/d1875a97-9eba-47be-a76d-6088cb13412b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 992.623368] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a50e234-17de-460b-8efb-95a267fff52b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.634560] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925880, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.638172] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 992.638172] env[69994]: value = "task-2925881" [ 992.638172] env[69994]: _type = "Task" [ 992.638172] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.641947] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a17e1734-1061-429b-90e4-02337c9ee289 tempest-ServersTestJSON-861734553 tempest-ServersTestJSON-861734553-project-member] Lock "c06a2540-e77d-48c0-967f-94e2a53c4d8f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.079s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.647108] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925881, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.137146] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925880, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.150773] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925881, 'name': Rename_Task, 'duration_secs': 0.206842} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.151326] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 993.151758] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d754cb53-422a-499f-b114-7fdcd21aad6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.158396] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 993.158396] env[69994]: value = "task-2925882" [ 993.158396] env[69994]: _type = "Task" [ 993.158396] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.169402] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925882, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.353108] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e9ce38-d737-4dcb-99b7-86df547cb402 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.361131] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d299af36-371c-43d9-8323-6b2f51aca1fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.411553] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6119cd6-deaf-49c2-ae8e-254d76d94f60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.424867] env[69994]: DEBUG nova.compute.manager [req-54014095-073a-455b-8caf-bb17267302a0 req-c861f306-ce8f-4d37-98b3-da0913ce5705 service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Received event network-vif-deleted-bd1e50cf-3e19-4962-b159-76798af793d4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 993.424867] env[69994]: INFO nova.compute.manager [req-54014095-073a-455b-8caf-bb17267302a0 req-c861f306-ce8f-4d37-98b3-da0913ce5705 service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Neutron deleted interface bd1e50cf-3e19-4962-b159-76798af793d4; detaching it from the instance and deleting it from the info cache [ 993.425393] env[69994]: DEBUG nova.network.neutron [req-54014095-073a-455b-8caf-bb17267302a0 req-c861f306-ce8f-4d37-98b3-da0913ce5705 service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.432521] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33105622-722f-4748-acee-bae9219cd8d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.452407] env[69994]: DEBUG nova.compute.provider_tree [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.636032] env[69994]: DEBUG oslo_vmware.api [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925880, 'name': RemoveSnapshot_Task, 'duration_secs': 1.316064} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.636334] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 993.636697] env[69994]: INFO nova.compute.manager [None req-1bd655b2-161e-4d67-9735-6c5816cfddb0 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Took 13.92 seconds to snapshot the instance on the hypervisor. [ 993.668779] env[69994]: DEBUG oslo_vmware.api [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925882, 'name': PowerOnVM_Task, 'duration_secs': 0.499502} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.669088] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 993.669503] env[69994]: DEBUG nova.compute.manager [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 993.670254] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2800300-81fb-4315-8359-cbb8a6209491 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.811238] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.811483] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.915251] env[69994]: DEBUG nova.network.neutron [-] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.932870] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e88725f-d59d-4c13-bd2e-73f213fa1bfd {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.945071] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1c5717-076b-46b0-93a6-599a1ffd6e19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.959186] env[69994]: DEBUG nova.scheduler.client.report [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 993.990333] env[69994]: DEBUG nova.compute.manager [req-54014095-073a-455b-8caf-bb17267302a0 req-c861f306-ce8f-4d37-98b3-da0913ce5705 service nova] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Detach interface failed, port_id=bd1e50cf-3e19-4962-b159-76798af793d4, reason: Instance 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 994.189636] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.316379] env[69994]: DEBUG nova.compute.manager [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 994.419288] env[69994]: INFO nova.compute.manager [-] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Took 1.82 seconds to deallocate network for instance. [ 994.465518] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.466355] env[69994]: DEBUG nova.compute.manager [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 994.468939] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.685s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.470405] env[69994]: INFO nova.compute.claims [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 994.848200] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.964853] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "d1875a97-9eba-47be-a76d-6088cb13412b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.965170] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "d1875a97-9eba-47be-a76d-6088cb13412b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.965381] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "d1875a97-9eba-47be-a76d-6088cb13412b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.965561] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "d1875a97-9eba-47be-a76d-6088cb13412b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.965728] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "d1875a97-9eba-47be-a76d-6088cb13412b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.968461] env[69994]: INFO nova.compute.manager [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Terminating instance [ 994.976697] env[69994]: DEBUG nova.compute.utils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 994.981938] env[69994]: DEBUG nova.compute.manager [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 994.982696] env[69994]: DEBUG nova.network.neutron [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 994.990690] env[69994]: INFO nova.compute.manager [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Took 0.57 seconds to detach 1 volumes for instance. [ 994.996125] env[69994]: DEBUG nova.compute.manager [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Deleting volume: cd17a3cc-4285-4a72-b443-b0f9d28d0473 {{(pid=69994) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 995.025086] env[69994]: DEBUG nova.policy [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4543702642614e079383389379629d8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0bbe936f4d284e73999846251269fefd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 995.366487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquiring lock "1ff25686-e13e-4003-909b-18bf919aa20c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.366746] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Lock "1ff25686-e13e-4003-909b-18bf919aa20c" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.450389] env[69994]: DEBUG nova.compute.manager [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 995.451373] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9935c25-651b-4b19-a2ea-433f5fa92265 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.476752] env[69994]: DEBUG nova.compute.manager [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 995.477028] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 995.478085] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ac3e34-a8b4-47ff-9693-e03ea0cb310a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.483320] env[69994]: DEBUG nova.compute.manager [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 995.486680] env[69994]: DEBUG nova.network.neutron [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Successfully created port: 961a1fd7-bcab-47f6-a2b7-6dd5fa005a30 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 995.496295] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 995.496537] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8c8113d-84bf-4059-a0dd-226322e79d64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.505569] env[69994]: DEBUG oslo_vmware.api [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 995.505569] env[69994]: value = "task-2925884" [ 995.505569] env[69994]: _type = "Task" [ 995.505569] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.518378] env[69994]: DEBUG oslo_vmware.api [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925884, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.561949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.726265] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "5b9648a7-f26f-4151-be5c-59991035a529" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.726488] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "5b9648a7-f26f-4151-be5c-59991035a529" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.799172] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c7ac3f-a9a3-4965-b6e8-b941a921c04a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.808511] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48230ab-0598-4ff0-af7d-2759328a56d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.845101] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9ec732-52d8-42ce-8dfa-2a41d143b644 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.853254] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af5db79-6b64-46a5-9dd6-38c978c6833f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.867908] env[69994]: DEBUG nova.compute.provider_tree [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.869670] env[69994]: DEBUG nova.compute.manager [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 995.967929] env[69994]: INFO nova.compute.manager [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] instance snapshotting [ 995.970858] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d2cdc7-79f0-47a4-b29e-a6578210f1c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.990706] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c271fc1-ab2a-4bbe-9f62-919c4aae41ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.016424] env[69994]: DEBUG oslo_vmware.api [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925884, 'name': PowerOffVM_Task, 'duration_secs': 0.198211} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.018027] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 996.018027] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 996.018027] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5740af34-a417-4154-89f7-18194bbfaf7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.078964] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 996.079240] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 996.079426] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleting the datastore file [datastore2] d1875a97-9eba-47be-a76d-6088cb13412b {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 996.079756] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-4fa3e4b0-aa23-4617-96e0-aae3aeba7932 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.086456] env[69994]: DEBUG oslo_vmware.api [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 996.086456] env[69994]: value = "task-2925886" [ 996.086456] env[69994]: _type = "Task" [ 996.086456] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.094869] env[69994]: DEBUG oslo_vmware.api [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925886, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.228679] env[69994]: DEBUG nova.compute.manager [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 996.373552] env[69994]: DEBUG nova.scheduler.client.report [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 996.399729] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.499870] env[69994]: DEBUG nova.compute.manager [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 996.507808] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 996.508147] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-80053e5e-0228-4a69-bfae-536eeba009d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.519031] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 996.519031] env[69994]: value = "task-2925887" [ 996.519031] env[69994]: _type = "Task" [ 996.519031] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.529361] env[69994]: DEBUG nova.virt.hardware [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 996.529504] env[69994]: DEBUG nova.virt.hardware [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 996.529693] env[69994]: DEBUG nova.virt.hardware [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 996.529927] env[69994]: DEBUG nova.virt.hardware [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 996.530108] env[69994]: DEBUG nova.virt.hardware [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 996.530287] env[69994]: DEBUG nova.virt.hardware [None req-f751665d-b36c-4e38-9e29-9d255e32d616 
tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 996.530500] env[69994]: DEBUG nova.virt.hardware [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 996.530676] env[69994]: DEBUG nova.virt.hardware [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 996.530874] env[69994]: DEBUG nova.virt.hardware [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 996.531076] env[69994]: DEBUG nova.virt.hardware [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 996.531271] env[69994]: DEBUG nova.virt.hardware [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 996.532519] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30a44c6-bc53-4606-98a6-a089d015e5bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.538161] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925887, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.544042] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2101e3b1-46af-48e1-9da3-efde35458e94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.595850] env[69994]: DEBUG oslo_vmware.api [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925886, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171209} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.596136] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 996.596366] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 996.596554] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 996.596743] env[69994]: INFO nova.compute.manager [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Took 1.12 seconds to destroy the instance on the hypervisor. [ 996.596974] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 996.597175] env[69994]: DEBUG nova.compute.manager [-] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 996.597272] env[69994]: DEBUG nova.network.neutron [-] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 996.750520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.883898] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.884046] env[69994]: DEBUG nova.compute.manager [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 996.887577] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.648s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.887577] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.887763] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 996.888008] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.169s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.889774] env[69994]: INFO nova.compute.claims [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.893044] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d5919f-0741-4f77-bcfc-a49b2c802f62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.902110] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a312fe0-5cbc-43b7-9d36-519368206614 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.922241] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a84c9dc-b06c-4a3b-9858-5325aae6db1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.929568] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2752dbda-8d92-4c7f-b738-53e9620bd92d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.973777] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179388MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 996.974130] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.032795] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925887, 'name': CreateSnapshot_Task, 'duration_secs': 0.496179} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.035377] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 997.038296] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3d7e70-a319-45f6-b126-a8719dbbb319 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.044770] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquiring lock "686feb53-00e2-43d9-b316-09c089df0891" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.045008] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Lock "686feb53-00e2-43d9-b316-09c089df0891" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.153183] env[69994]: DEBUG nova.compute.manager [req-592338f2-e155-4b9e-9d92-769b508b9fad req-896fdc59-9d3d-42f7-b21a-bd40607e471d service nova] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Received event network-vif-deleted-63725932-f447-4abb-a32e-2b3b30e8e79f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 997.153475] env[69994]: INFO nova.compute.manager [req-592338f2-e155-4b9e-9d92-769b508b9fad req-896fdc59-9d3d-42f7-b21a-bd40607e471d service nova] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Neutron deleted interface 63725932-f447-4abb-a32e-2b3b30e8e79f; detaching it from the instance and deleting it from the info cache [ 997.153692] env[69994]: DEBUG nova.network.neutron [req-592338f2-e155-4b9e-9d92-769b508b9fad req-896fdc59-9d3d-42f7-b21a-bd40607e471d service nova] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.370919] env[69994]: DEBUG nova.network.neutron [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Successfully updated port: 961a1fd7-bcab-47f6-a2b7-6dd5fa005a30 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 997.397036] env[69994]: DEBUG nova.compute.utils [None 
req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 997.397036] env[69994]: DEBUG nova.compute.manager [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 997.397036] env[69994]: DEBUG nova.network.neutron [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 997.462130] env[69994]: DEBUG nova.policy [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4d765bc5431e4437bcf442fca77e1281', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '59e532e93d74423ea976adf4385b9a1e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 997.553392] env[69994]: DEBUG nova.compute.manager [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 997.570023] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 997.570023] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0181881f-5334-468c-bf03-c2b74bfa2386 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.577114] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 997.577114] env[69994]: value = "task-2925888" [ 997.577114] env[69994]: _type = "Task" [ 997.577114] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.585548] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925888, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.633776] env[69994]: DEBUG nova.network.neutron [-] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.661571] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ffb63ca-7baa-4bba-bc68-226953e5c028 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.670029] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401dc924-ccdb-4390-b966-faae7b0d26ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.700253] env[69994]: DEBUG nova.compute.manager [req-592338f2-e155-4b9e-9d92-769b508b9fad req-896fdc59-9d3d-42f7-b21a-bd40607e471d service nova] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Detach interface failed, port_id=63725932-f447-4abb-a32e-2b3b30e8e79f, reason: Instance d1875a97-9eba-47be-a76d-6088cb13412b could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 997.875139] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "refresh_cache-5e142f6e-920a-4f11-abff-13eb5c168660" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.875139] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "refresh_cache-5e142f6e-920a-4f11-abff-13eb5c168660" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.875139] env[69994]: DEBUG nova.network.neutron [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.900041] env[69994]: DEBUG nova.compute.manager [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 998.083235] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.094243] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925888, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.136490] env[69994]: INFO nova.compute.manager [-] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Took 1.54 seconds to deallocate network for instance. [ 998.168105] env[69994]: DEBUG nova.network.neutron [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Successfully created port: b6f3b951-fb8f-4467-98e6-ec4cdf4169cd {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 998.197380] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b851f698-2638-47d6-b30b-1208dfbd4c5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.207285] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09664fce-6e8c-4927-a640-954e9bdc40ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.243988] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51446e4a-366a-4e7f-a80d-ab83d4f9c2ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.253891] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b623f0-583a-4e75-af00-493d197c344e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.269687] env[69994]: DEBUG nova.compute.provider_tree [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 998.415847] env[69994]: DEBUG nova.network.neutron [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 998.560652] env[69994]: DEBUG nova.network.neutron [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Updating instance_info_cache with network_info: [{"id": "961a1fd7-bcab-47f6-a2b7-6dd5fa005a30", "address": "fa:16:3e:7e:e8:63", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap961a1fd7-bc", "ovs_interfaceid": "961a1fd7-bcab-47f6-a2b7-6dd5fa005a30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.589738] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925888, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.648729] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.774265] env[69994]: DEBUG nova.scheduler.client.report [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 998.919125] env[69994]: DEBUG nova.compute.manager [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 998.947120] env[69994]: DEBUG nova.virt.hardware [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 998.947120] env[69994]: DEBUG nova.virt.hardware [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 998.947120] env[69994]: DEBUG nova.virt.hardware [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 998.947120] env[69994]: DEBUG nova.virt.hardware [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 998.947120] env[69994]: DEBUG nova.virt.hardware [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 998.947120] env[69994]: DEBUG nova.virt.hardware [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 998.947120] env[69994]: DEBUG nova.virt.hardware [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 998.947669] env[69994]: DEBUG nova.virt.hardware [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 998.948093] env[69994]: DEBUG nova.virt.hardware [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 998.948414] env[69994]: DEBUG nova.virt.hardware [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 998.948743] env[69994]: DEBUG nova.virt.hardware [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 998.950344] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d5f764-1b98-492c-820f-7b66628afee3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.960083] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9b1632-853f-4ac1-88c4-c6b6f51b73a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.063257] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "refresh_cache-5e142f6e-920a-4f11-abff-13eb5c168660" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.063670] env[69994]: DEBUG nova.compute.manager [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Instance network_info: |[{"id": "961a1fd7-bcab-47f6-a2b7-6dd5fa005a30", "address": "fa:16:3e:7e:e8:63", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap961a1fd7-bc", "ovs_interfaceid": "961a1fd7-bcab-47f6-a2b7-6dd5fa005a30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 999.064995] env[69994]: 
DEBUG nova.virt.vmwareapi.vmops [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:e8:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '961a1fd7-bcab-47f6-a2b7-6dd5fa005a30', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.071793] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 999.072028] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 999.072259] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-377051de-5728-42e7-82d8-62e03cfd71a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.097315] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925888, 'name': CloneVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.098580] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.098580] env[69994]: value = "task-2925889" [ 999.098580] env[69994]: _type = "Task" [ 999.098580] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.107345] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925889, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.184902] env[69994]: DEBUG nova.compute.manager [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Received event network-vif-plugged-961a1fd7-bcab-47f6-a2b7-6dd5fa005a30 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 999.185152] env[69994]: DEBUG oslo_concurrency.lockutils [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] Acquiring lock "5e142f6e-920a-4f11-abff-13eb5c168660-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.185443] env[69994]: DEBUG oslo_concurrency.lockutils [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] Lock "5e142f6e-920a-4f11-abff-13eb5c168660-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.185575] env[69994]: DEBUG oslo_concurrency.lockutils [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] Lock "5e142f6e-920a-4f11-abff-13eb5c168660-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.185700] env[69994]: DEBUG nova.compute.manager [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] No waiting events found dispatching network-vif-plugged-961a1fd7-bcab-47f6-a2b7-6dd5fa005a30 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 999.185834] env[69994]: WARNING nova.compute.manager [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Received unexpected event network-vif-plugged-961a1fd7-bcab-47f6-a2b7-6dd5fa005a30 for instance with vm_state building and task_state spawning. [ 999.186019] env[69994]: DEBUG nova.compute.manager [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Received event network-changed-961a1fd7-bcab-47f6-a2b7-6dd5fa005a30 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 999.186271] env[69994]: DEBUG nova.compute.manager [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Refreshing instance network info cache due to event network-changed-961a1fd7-bcab-47f6-a2b7-6dd5fa005a30. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 999.186410] env[69994]: DEBUG oslo_concurrency.lockutils [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] Acquiring lock "refresh_cache-5e142f6e-920a-4f11-abff-13eb5c168660" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.186548] env[69994]: DEBUG oslo_concurrency.lockutils [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] Acquired lock "refresh_cache-5e142f6e-920a-4f11-abff-13eb5c168660" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.186683] env[69994]: DEBUG nova.network.neutron [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Refreshing network info cache for port 961a1fd7-bcab-47f6-a2b7-6dd5fa005a30 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 999.280180] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.392s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.280791] env[69994]: DEBUG nova.compute.manager [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 999.283691] env[69994]: DEBUG oslo_concurrency.lockutils [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.781s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.532015] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb829c3-f068-4e0f-bfc4-a97fed3dd8e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.540911] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92612d8e-c59b-4385-b48a-2f896fb9d882 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.573103] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353e7c84-6851-41ac-a1d2-bbe11cc199e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.580725] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c0ea2e-71d1-4699-bd1a-dfb6af0c8245 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.595023] env[69994]: DEBUG nova.compute.provider_tree [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.606530] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925888, 'name': CloneVM_Task, 'duration_secs': 1.531228} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.607848] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Created linked-clone VM from snapshot [ 999.608758] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72ebcf4-67d9-4a20-8012-888966664462 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.613872] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925889, 'name': CreateVM_Task, 'duration_secs': 0.303531} completed successfully. 
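Each vSphere call that ends in "_Task" returns a task object which oslo.vmware then polls, producing the "Waiting for the task ... / progress is N% / completed successfully" records seen here for CloneVM_Task and CreateVM_Task. A hedged sketch of that pattern; `session` is assumed to be an already-constructed oslo_vmware.api.VMwareAPISession, and run_task is an invented helper name.

    def run_task(session, method, managed_object, **kwargs):
        """Invoke a vSphere *_Task method and block until it finishes."""
        # invoke_api issues the SOAP call (the "Invoking <object>.<method>" lines).
        task = session.invoke_api(session.vim, method, managed_object, **kwargs)
        # wait_for_task polls the task and raises if it errors out
        # (the "progress is N%" / "completed successfully" lines).
        return session.wait_for_task(task)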
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.614365] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 999.615058] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.615233] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.615546] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 999.615786] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ded1b27-993b-45f4-bcd3-71a37e6de99f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.620920] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Uploading image ee1b98b0-022f-4790-960d-699d8d8d274a {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 999.626553] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 999.626553] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d3b53a-f66f-0f00-456c-29eda1d424bd" [ 999.626553] env[69994]: _type = "Task" [ 999.626553] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.637013] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d3b53a-f66f-0f00-456c-29eda1d424bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.652065] env[69994]: DEBUG oslo_vmware.rw_handles [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 999.652065] env[69994]: value = "vm-587560" [ 999.652065] env[69994]: _type = "VirtualMachine" [ 999.652065] env[69994]: }. 
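To upload the linked-clone snapshot as an image, the driver asks vCenter for an HttpNfcLease on the VM (the ExportVm call above), waits for the lease to become ready, and reads the VMDK URLs out of the lease info. A sketch of that flow using oslo.vmware's generic invoke_api / wait_for_lease_ready calls; open_export_lease is an invented name and error and lease-progress handling are omitted.

    from oslo_vmware import vim_util

    def open_export_lease(session, vm_ref):
        """Return the HttpNfcLease info for exporting vm_ref's disks."""
        lease = session.invoke_api(session.vim, "ExportVm", vm_ref)
        session.wait_for_lease_ready(lease)   # "Lease ... is ready." above
        # The lease's info property carries deviceUrl entries; their url fields
        # are the https://.../disk-0.vmdk URLs that appear later in the log.
        return session.invoke_api(vim_util, "get_object_property",
                                  session.vim, lease, "info")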
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 999.652065] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8da8a71b-dc1b-4dd0-b62a-e37f707f240a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.660060] env[69994]: DEBUG oslo_vmware.rw_handles [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lease: (returnval){ [ 999.660060] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521dfb1b-8ca7-a208-d517-a708361ba273" [ 999.660060] env[69994]: _type = "HttpNfcLease" [ 999.660060] env[69994]: } obtained for exporting VM: (result){ [ 999.660060] env[69994]: value = "vm-587560" [ 999.660060] env[69994]: _type = "VirtualMachine" [ 999.660060] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 999.660386] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the lease: (returnval){ [ 999.660386] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521dfb1b-8ca7-a208-d517-a708361ba273" [ 999.660386] env[69994]: _type = "HttpNfcLease" [ 999.660386] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 999.667129] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 999.667129] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521dfb1b-8ca7-a208-d517-a708361ba273" [ 999.667129] env[69994]: _type = "HttpNfcLease" [ 999.667129] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 999.714033] env[69994]: DEBUG nova.network.neutron [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Successfully updated port: b6f3b951-fb8f-4467-98e6-ec4cdf4169cd {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 999.788361] env[69994]: DEBUG nova.compute.utils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 999.789721] env[69994]: DEBUG nova.compute.manager [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 999.789948] env[69994]: DEBUG nova.network.neutron [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 999.869456] env[69994]: DEBUG nova.policy [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de3fba71299348fab70f6e21e1028bb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0f5bb040f474df19739d5170639ff67', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 999.872460] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Acquiring lock "93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.872460] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Lock "93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.980728] env[69994]: DEBUG nova.network.neutron [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Updated VIF entry in instance network info cache for port 961a1fd7-bcab-47f6-a2b7-6dd5fa005a30. 
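The "Policy check for network:attach_external_network failed" line is oslo.policy rejecting a non-admin credential set during port allocation. A rough illustration of such a check with oslo.policy; the default rule string "role:admin" is an assumption for the sketch, not necessarily the rule configured in this deployment.

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "role:admin"))

    creds = {"roles": ["member", "reader"],
             "project_id": "c0f5bb040f474df19739d5170639ff67"}
    # Returns False for these credentials, matching the "failed" line above.
    allowed = enforcer.enforce("network:attach_external_network", {}, creds)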
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 999.981212] env[69994]: DEBUG nova.network.neutron [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Updating instance_info_cache with network_info: [{"id": "961a1fd7-bcab-47f6-a2b7-6dd5fa005a30", "address": "fa:16:3e:7e:e8:63", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap961a1fd7-bc", "ovs_interfaceid": "961a1fd7-bcab-47f6-a2b7-6dd5fa005a30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.103079] env[69994]: DEBUG nova.scheduler.client.report [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1000.141773] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d3b53a-f66f-0f00-456c-29eda1d424bd, 'name': SearchDatastore_Task, 'duration_secs': 0.011402} completed successfully. 
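The instance_info_cache entry above is a list of VIF dictionaries (port id, MAC, bridge, subnets with fixed IPs, NSX segment details). A tiny helper showing how such an entry can be reduced to the fields usually needed; the key names are taken directly from the cached structure above, while the function name is invented.

    def summarize_vif(vif):
        """Pull the commonly used fields out of one cached VIF dict."""
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        return {
            "port_id": vif["id"],                 # e.g. 961a1fd7-bcab-47f6-a2b7-6dd5fa005a30
            "mac": vif["address"],                # e.g. fa:16:3e:7e:e8:63
            "bridge": vif["network"]["bridge"],   # br-int
            "mtu": vif["network"]["meta"]["mtu"], # 8950
            "fixed_ips": fixed_ips,               # ['192.168.128.13']
        }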
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.142261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.142778] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1000.143190] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.143524] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.143988] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1000.144381] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4aafb665-6c21-4c9f-be8f-295d894fc1eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.157018] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1000.157018] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1000.157018] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-767d6755-e9ce-4c8e-a835-bdc42dc1bd6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.165226] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1000.165226] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c2c4a3-0c80-ad25-2d5f-5306c959f1cf" [ 1000.165226] env[69994]: _type = "Task" [ 1000.165226] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.170762] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1000.170762] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521dfb1b-8ca7-a208-d517-a708361ba273" [ 1000.170762] env[69994]: _type = "HttpNfcLease" [ 1000.170762] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1000.172336] env[69994]: DEBUG oslo_vmware.rw_handles [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1000.172336] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521dfb1b-8ca7-a208-d517-a708361ba273" [ 1000.172336] env[69994]: _type = "HttpNfcLease" [ 1000.172336] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1000.173282] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b07cce-bb85-4b11-846b-1a4d76d7c4cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.179971] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c2c4a3-0c80-ad25-2d5f-5306c959f1cf, 'name': SearchDatastore_Task, 'duration_secs': 0.009533} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.181258] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1141cd62-82e1-4bc9-9625-05ec889f1178 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.187119] env[69994]: DEBUG oslo_vmware.rw_handles [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c6895-5d36-9ab3-9933-a16836c1557f/disk-0.vmdk from lease info. 
{{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1000.187413] env[69994]: DEBUG oslo_vmware.rw_handles [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c6895-5d36-9ab3-9933-a16836c1557f/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1000.254381] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Acquiring lock "refresh_cache-29326ab7-2b4b-42af-a90c-e86510bcd443" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.254562] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Acquired lock "refresh_cache-29326ab7-2b4b-42af-a90c-e86510bcd443" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.254719] env[69994]: DEBUG nova.network.neutron [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1000.255936] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1000.255936] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d2b0da-e955-65e8-cbba-1630bb93f11b" [ 1000.255936] env[69994]: _type = "Task" [ 1000.255936] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.257089] env[69994]: DEBUG nova.network.neutron [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Successfully created port: fcbc6909-efc0-4ccc-8b55-763a5b3a9c73 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1000.274036] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d2b0da-e955-65e8-cbba-1630bb93f11b, 'name': SearchDatastore_Task, 'duration_secs': 0.010136} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.274036] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.274192] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 5e142f6e-920a-4f11-abff-13eb5c168660/5e142f6e-920a-4f11-abff-13eb5c168660.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1000.274338] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b5de2e6-72c6-40a7-adf2-56c14b7a1f3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.282188] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1000.282188] env[69994]: value = "task-2925891" [ 1000.282188] env[69994]: _type = "Task" [ 1000.282188] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.291507] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925891, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.294418] env[69994]: DEBUG nova.compute.manager [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1000.297369] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8ba9d346-0872-4a57-ac07-1fc8f1ffb58a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.373804] env[69994]: DEBUG nova.compute.manager [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Starting instance... 
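Because the image is already in the datastore cache, spawning 5e142f6e-920a-4f11-abff-13eb5c168660 only needs a datastore-side copy of the cached VMDK into the instance directory, done through the VirtualDiskManager's CopyVirtualDisk_Task above. A hedged sketch of that call; copy_cached_image is an invented helper, dc_ref is assumed to be the datacenter moref, and optional arguments such as destSpec are left out.

    def copy_cached_image(session, dc_ref, source_path, dest_path):
        """Copy a cached VMDK to the instance directory and wait for completion."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, "CopyVirtualDisk_Task", disk_mgr,
                                  sourceName=source_path, sourceDatacenter=dc_ref,
                                  destName=dest_path, destDatacenter=dc_ref)
        session.wait_for_task(task)

    # copy_cached_image(session, dc_ref,
    #     "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/"
    #     "f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk",
    #     "[datastore2] 5e142f6e-920a-4f11-abff-13eb5c168660/"
    #     "5e142f6e-920a-4f11-abff-13eb5c168660.vmdk")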
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1000.483728] env[69994]: DEBUG oslo_concurrency.lockutils [req-118aaecb-9353-40a5-8c4a-204c49739510 req-7e51ee2a-5cb6-4566-8cd5-d207cf821659 service nova] Releasing lock "refresh_cache-5e142f6e-920a-4f11-abff-13eb5c168660" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.609199] env[69994]: DEBUG oslo_concurrency.lockutils [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.325s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.609543] env[69994]: INFO nova.compute.manager [None req-53a96fc8-a0d7-47c9-a4b4-09920521ffc3 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Successfully reverted task state from rebuilding on failure for instance. [ 1000.617100] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 6.426s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.617100] env[69994]: DEBUG nova.objects.instance [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1000.792929] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925891, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482449} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.793970] env[69994]: DEBUG nova.network.neutron [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1000.795994] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 5e142f6e-920a-4f11-abff-13eb5c168660/5e142f6e-920a-4f11-abff-13eb5c168660.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1000.796260] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1000.796914] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ff0e374-c10c-4493-acdb-ccfd6768bee1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.806498] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1000.806498] env[69994]: value = "task-2925892" [ 1000.806498] env[69994]: _type = "Task" [ 1000.806498] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.816292] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925892, 'name': ExtendVirtualDisk_Task} progress is 0%. 
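The copied base disk is then grown to the flavor's root size (1 GiB, i.e. the 1048576 KB above) with ExtendVirtualDisk_Task. A sketch of that call under the same assumptions as the copy sketch earlier (`session` is an oslo_vmware session, dc_ref the datacenter moref); the parameter names name/datacenter/newCapacityKb/eagerZero follow the standard vSphere API for this method.

    def extend_root_disk(session, dc_ref, vmdk_path, new_capacity_kb):
        """Grow a VMDK in place, e.g. to 1048576 KB as logged above."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, "ExtendVirtualDisk_Task", disk_mgr,
                                  name=vmdk_path, datacenter=dc_ref,
                                  newCapacityKb=new_capacity_kb, eagerZero=False)
        session.wait_for_task(task)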
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.899694] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.946324] env[69994]: DEBUG nova.network.neutron [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Updating instance_info_cache with network_info: [{"id": "b6f3b951-fb8f-4467-98e6-ec4cdf4169cd", "address": "fa:16:3e:e7:7a:02", "network": {"id": "63d9fcad-0017-4af7-9a5c-3efe4bbda04f", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-139139119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59e532e93d74423ea976adf4385b9a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32faf59b-014c-4f1f-8331-40df95bf741f", "external-id": "nsx-vlan-transportzone-996", "segmentation_id": 996, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6f3b951-fb", "ovs_interfaceid": "b6f3b951-fb8f-4467-98e6-ec4cdf4169cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.221883] env[69994]: DEBUG nova.compute.manager [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Received event network-vif-plugged-b6f3b951-fb8f-4467-98e6-ec4cdf4169cd {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1001.222203] env[69994]: DEBUG oslo_concurrency.lockutils [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] Acquiring lock "29326ab7-2b4b-42af-a90c-e86510bcd443-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.228368] env[69994]: DEBUG oslo_concurrency.lockutils [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] Lock "29326ab7-2b4b-42af-a90c-e86510bcd443-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.006s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.228785] env[69994]: DEBUG oslo_concurrency.lockutils [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] Lock "29326ab7-2b4b-42af-a90c-e86510bcd443-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.229169] env[69994]: DEBUG nova.compute.manager [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] No waiting events found dispatching network-vif-plugged-b6f3b951-fb8f-4467-98e6-ec4cdf4169cd {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1001.229534] env[69994]: WARNING nova.compute.manager [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Received unexpected event network-vif-plugged-b6f3b951-fb8f-4467-98e6-ec4cdf4169cd for instance with vm_state building and task_state spawning. [ 1001.229882] env[69994]: DEBUG nova.compute.manager [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Received event network-changed-b6f3b951-fb8f-4467-98e6-ec4cdf4169cd {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1001.230137] env[69994]: DEBUG nova.compute.manager [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Refreshing instance network info cache due to event network-changed-b6f3b951-fb8f-4467-98e6-ec4cdf4169cd. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1001.232536] env[69994]: DEBUG oslo_concurrency.lockutils [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] Acquiring lock "refresh_cache-29326ab7-2b4b-42af-a90c-e86510bcd443" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.308909] env[69994]: DEBUG nova.compute.manager [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1001.327350] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925892, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064107} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.327992] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1001.329037] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74fc6815-69a6-4930-96e9-500cbf1257d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.359335] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 5e142f6e-920a-4f11-abff-13eb5c168660/5e142f6e-920a-4f11-abff-13eb5c168660.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.362643] env[69994]: DEBUG nova.virt.hardware [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1001.363182] env[69994]: DEBUG nova.virt.hardware [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1001.363632] env[69994]: DEBUG nova.virt.hardware [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1001.364187] env[69994]: DEBUG nova.virt.hardware [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1001.364542] env[69994]: DEBUG nova.virt.hardware [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1001.364932] env[69994]: 
DEBUG nova.virt.hardware [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1001.364932] env[69994]: DEBUG nova.virt.hardware [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1001.364932] env[69994]: DEBUG nova.virt.hardware [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1001.364932] env[69994]: DEBUG nova.virt.hardware [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1001.366348] env[69994]: DEBUG nova.virt.hardware [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1001.366796] env[69994]: DEBUG nova.virt.hardware [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1001.367242] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e653909-8fe5-45b1-ad5a-c77aa1b7d3f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.384301] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a39aa2be-810a-4217-8f5d-1577ccd45c62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.395014] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3fcaec-9eef-47c1-a8fc-6e89fd938bdb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.399317] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1001.399317] env[69994]: value = "task-2925893" [ 1001.399317] env[69994]: _type = "Task" [ 1001.399317] env[69994]: } to complete. 
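The hardware records above walk from the flavor and image limits (all unset, hence the 65536 maxima) to "Got 1 possible topologies" and a preferred 1:1:1 layout for the single vCPU. The arithmetic behind that can be shown with a short stand-alone function; this is only an illustration of the idea, not nova.virt.hardware's actual implementation.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)] -- "Got 1 possible topologies"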
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.417550] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925893, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.449585] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Releasing lock "refresh_cache-29326ab7-2b4b-42af-a90c-e86510bcd443" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.449775] env[69994]: DEBUG nova.compute.manager [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Instance network_info: |[{"id": "b6f3b951-fb8f-4467-98e6-ec4cdf4169cd", "address": "fa:16:3e:e7:7a:02", "network": {"id": "63d9fcad-0017-4af7-9a5c-3efe4bbda04f", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-139139119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59e532e93d74423ea976adf4385b9a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32faf59b-014c-4f1f-8331-40df95bf741f", "external-id": "nsx-vlan-transportzone-996", "segmentation_id": 996, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6f3b951-fb", "ovs_interfaceid": "b6f3b951-fb8f-4467-98e6-ec4cdf4169cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1001.450118] env[69994]: DEBUG oslo_concurrency.lockutils [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] Acquired lock "refresh_cache-29326ab7-2b4b-42af-a90c-e86510bcd443" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.450313] env[69994]: DEBUG nova.network.neutron [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Refreshing network info cache for port b6f3b951-fb8f-4467-98e6-ec4cdf4169cd {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1001.451692] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:7a:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32faf59b-014c-4f1f-8331-40df95bf741f', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6f3b951-fb8f-4467-98e6-ec4cdf4169cd', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1001.459484] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Creating folder: Project (59e532e93d74423ea976adf4385b9a1e). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1001.460870] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfe2ac54-0132-4f75-af1a-7534106b1154 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.473538] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Created folder: Project (59e532e93d74423ea976adf4385b9a1e) in parent group-v587342. [ 1001.473836] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Creating folder: Instances. Parent ref: group-v587562. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1001.474361] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf05d3e7-bae2-473f-8f25-9d90f872f179 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.487090] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Created folder: Instances in parent group-v587562. [ 1001.487446] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.487730] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1001.488303] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5979709-1830-4ce5-9a83-56e826515604 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.513891] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1001.513891] env[69994]: value = "task-2925896" [ 1001.513891] env[69994]: _type = "Task" [ 1001.513891] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.522153] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925896, 'name': CreateVM_Task} progress is 0%. 
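Before CreateVM_Task, the driver makes sure a per-project folder tree exists under the parent folder: "Project (<tenant id>)" and then "Instances", as in the CreateFolder records above. A sketch of those calls; ensure_project_folders is an invented name, parent_ref is assumed to be the parent folder moref (group-v587342 in this log), and the DuplicateName case that arises when the folders already exist is not handled here.

    def ensure_project_folders(session, parent_ref, project_id):
        """Create Project (<id>)/Instances folders and return the Instances moref."""
        project_folder = session.invoke_api(session.vim, "CreateFolder", parent_ref,
                                            name="Project (%s)" % project_id)
        return session.invoke_api(session.vim, "CreateFolder", project_folder,
                                  name="Instances")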
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.629185] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67823cc3-56d3-42b2-9341-cf3c2f8a62f7 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.630911] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.783s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.633097] env[69994]: INFO nova.compute.claims [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1001.794980] env[69994]: DEBUG nova.network.neutron [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Successfully updated port: fcbc6909-efc0-4ccc-8b55-763a5b3a9c73 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1001.909890] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925893, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.026322] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925896, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.217904] env[69994]: DEBUG nova.network.neutron [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Updated VIF entry in instance network info cache for port b6f3b951-fb8f-4467-98e6-ec4cdf4169cd. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1002.218325] env[69994]: DEBUG nova.network.neutron [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Updating instance_info_cache with network_info: [{"id": "b6f3b951-fb8f-4467-98e6-ec4cdf4169cd", "address": "fa:16:3e:e7:7a:02", "network": {"id": "63d9fcad-0017-4af7-9a5c-3efe4bbda04f", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-139139119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59e532e93d74423ea976adf4385b9a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32faf59b-014c-4f1f-8331-40df95bf741f", "external-id": "nsx-vlan-transportzone-996", "segmentation_id": 996, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6f3b951-fb", "ovs_interfaceid": "b6f3b951-fb8f-4467-98e6-ec4cdf4169cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.297710] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "refresh_cache-f946992b-faf2-4580-adcd-806d3b8fd104" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.297872] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "refresh_cache-f946992b-faf2-4580-adcd-806d3b8fd104" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.298034] env[69994]: DEBUG nova.network.neutron [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1002.410938] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925893, 'name': ReconfigVM_Task, 'duration_secs': 0.70109} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.411221] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 5e142f6e-920a-4f11-abff-13eb5c168660/5e142f6e-920a-4f11-abff-13eb5c168660.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.411906] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30afcefd-d924-4c2e-a5e2-3585867cec87 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.420346] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1002.420346] env[69994]: value = "task-2925897" [ 1002.420346] env[69994]: _type = "Task" [ 1002.420346] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.432749] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925897, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.526106] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925896, 'name': CreateVM_Task, 'duration_secs': 0.591975} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.526465] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1002.527276] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.527564] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.528091] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1002.528381] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05077208-4e2b-41b2-94a7-5eb8f7503b10 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.534604] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Waiting for the task: (returnval){ [ 1002.534604] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bc9a95-9620-6636-c353-fd51eaa2ed20" [ 1002.534604] env[69994]: _type = "Task" [ 1002.534604] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.543497] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bc9a95-9620-6636-c353-fd51eaa2ed20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.723121] env[69994]: DEBUG oslo_concurrency.lockutils [req-6283fe3e-a79a-4e81-8407-ad46efbe02b5 req-1cdf1dd4-a2e7-4bc7-9898-9e10b818d06b service nova] Releasing lock "refresh_cache-29326ab7-2b4b-42af-a90c-e86510bcd443" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.834906] env[69994]: DEBUG nova.network.neutron [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1002.899528] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae026de0-f516-4528-9249-4a4cf2bc3792 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.911046] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c2229c-28ae-4ac6-b70d-6b8c310a1cc2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.945750] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bea90f-7bd4-43a9-9ff4-a775f7fb225d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.959026] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925897, 'name': Rename_Task, 'duration_secs': 0.198765} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.959306] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1002.961035] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fffaa241-0b08-424f-9c98-fce77a3892f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.964988] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-523ddbfd-f8f6-4d63-999c-215b4bca04d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.977952] env[69994]: DEBUG nova.compute.provider_tree [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.980730] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1002.980730] env[69994]: value = "task-2925898" [ 1002.980730] env[69994]: _type = "Task" [ 1002.980730] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.984853] env[69994]: DEBUG nova.network.neutron [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Updating instance_info_cache with network_info: [{"id": "fcbc6909-efc0-4ccc-8b55-763a5b3a9c73", "address": "fa:16:3e:f3:d3:94", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcbc6909-ef", "ovs_interfaceid": "fcbc6909-efc0-4ccc-8b55-763a5b3a9c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.991915] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925898, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.046700] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bc9a95-9620-6636-c353-fd51eaa2ed20, 'name': SearchDatastore_Task, 'duration_secs': 0.01312} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.047021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.047262] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1003.047505] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.047664] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.047874] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1003.048163] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b1d2244-522e-4d01-b3bb-9dec1ce7df6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.058317] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1003.058512] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1003.059243] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-335be092-ddc1-42d2-aae3-a33f072b02cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.064823] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Waiting for the task: (returnval){ [ 1003.064823] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d6571d-e5fa-3bf1-7ae1-14c6c6acf964" [ 1003.064823] env[69994]: _type = "Task" [ 1003.064823] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.073015] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d6571d-e5fa-3bf1-7ae1-14c6c6acf964, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.273488] env[69994]: DEBUG nova.compute.manager [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Received event network-vif-plugged-fcbc6909-efc0-4ccc-8b55-763a5b3a9c73 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.273766] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] Acquiring lock "f946992b-faf2-4580-adcd-806d3b8fd104-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.273915] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] Lock "f946992b-faf2-4580-adcd-806d3b8fd104-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.274213] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] Lock "f946992b-faf2-4580-adcd-806d3b8fd104-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.274296] env[69994]: DEBUG nova.compute.manager [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] No waiting events found dispatching network-vif-plugged-fcbc6909-efc0-4ccc-8b55-763a5b3a9c73 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1003.274460] env[69994]: WARNING nova.compute.manager [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Received unexpected event 
network-vif-plugged-fcbc6909-efc0-4ccc-8b55-763a5b3a9c73 for instance with vm_state building and task_state spawning. [ 1003.274703] env[69994]: DEBUG nova.compute.manager [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Received event network-changed-fcbc6909-efc0-4ccc-8b55-763a5b3a9c73 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.274799] env[69994]: DEBUG nova.compute.manager [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Refreshing instance network info cache due to event network-changed-fcbc6909-efc0-4ccc-8b55-763a5b3a9c73. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1003.274938] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] Acquiring lock "refresh_cache-f946992b-faf2-4580-adcd-806d3b8fd104" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.483276] env[69994]: DEBUG nova.scheduler.client.report [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1003.490129] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "refresh_cache-f946992b-faf2-4580-adcd-806d3b8fd104" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.490431] env[69994]: DEBUG nova.compute.manager [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Instance network_info: |[{"id": "fcbc6909-efc0-4ccc-8b55-763a5b3a9c73", "address": "fa:16:3e:f3:d3:94", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcbc6909-ef", 
"ovs_interfaceid": "fcbc6909-efc0-4ccc-8b55-763a5b3a9c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1003.490779] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] Acquired lock "refresh_cache-f946992b-faf2-4580-adcd-806d3b8fd104" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.490993] env[69994]: DEBUG nova.network.neutron [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Refreshing network info cache for port fcbc6909-efc0-4ccc-8b55-763a5b3a9c73 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1003.492194] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:d3:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fcbc6909-efc0-4ccc-8b55-763a5b3a9c73', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1003.501760] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1003.506359] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1003.509850] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7746c33-6082-4a54-bcb2-1db724100cc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.525890] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925898, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.534025] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1003.534025] env[69994]: value = "task-2925899" [ 1003.534025] env[69994]: _type = "Task" [ 1003.534025] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.543876] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925899, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.577883] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d6571d-e5fa-3bf1-7ae1-14c6c6acf964, 'name': SearchDatastore_Task, 'duration_secs': 0.017453} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.578772] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca5dc0b5-d239-4faa-8a6d-41a004a9b2e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.585309] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Waiting for the task: (returnval){ [ 1003.585309] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52546f8e-1dcd-df4d-4fb3-a738fc2f65b5" [ 1003.585309] env[69994]: _type = "Task" [ 1003.585309] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.595225] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52546f8e-1dcd-df4d-4fb3-a738fc2f65b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.755925] env[69994]: DEBUG nova.network.neutron [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Updated VIF entry in instance network info cache for port fcbc6909-efc0-4ccc-8b55-763a5b3a9c73. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1003.756345] env[69994]: DEBUG nova.network.neutron [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Updating instance_info_cache with network_info: [{"id": "fcbc6909-efc0-4ccc-8b55-763a5b3a9c73", "address": "fa:16:3e:f3:d3:94", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcbc6909-ef", "ovs_interfaceid": "fcbc6909-efc0-4ccc-8b55-763a5b3a9c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.991145] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.991610] env[69994]: DEBUG nova.compute.manager [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1003.994298] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.433s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.994498] env[69994]: DEBUG nova.objects.instance [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Lazy-loading 'resources' on Instance uuid 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1004.001411] env[69994]: DEBUG oslo_vmware.api [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925898, 'name': PowerOnVM_Task, 'duration_secs': 0.639493} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.001513] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1004.001945] env[69994]: INFO nova.compute.manager [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Took 7.50 seconds to spawn the instance on the hypervisor. [ 1004.001945] env[69994]: DEBUG nova.compute.manager [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1004.004115] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7935ef7e-a919-4038-9abf-d6d62cbceae6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.044243] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925899, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.095356] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52546f8e-1dcd-df4d-4fb3-a738fc2f65b5, 'name': SearchDatastore_Task, 'duration_secs': 0.013403} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.096211] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.096477] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 29326ab7-2b4b-42af-a90c-e86510bcd443/29326ab7-2b4b-42af-a90c-e86510bcd443.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1004.096743] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f846dc8-5ccb-484d-89f0-88d305066555 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.105259] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Waiting for the task: (returnval){ [ 1004.105259] env[69994]: value = "task-2925900" [ 1004.105259] env[69994]: _type = "Task" [ 1004.105259] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.114242] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925900, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.259197] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b688212-0e91-43ad-830b-c50b69706d54 req-c379a2e3-c686-47b4-8be9-5f148ea6de71 service nova] Releasing lock "refresh_cache-f946992b-faf2-4580-adcd-806d3b8fd104" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.500970] env[69994]: DEBUG nova.compute.utils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1004.502520] env[69994]: DEBUG nova.compute.manager [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1004.502688] env[69994]: DEBUG nova.network.neutron [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1004.526335] env[69994]: INFO nova.compute.manager [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Took 19.53 seconds to build instance. [ 1004.543328] env[69994]: DEBUG nova.policy [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e347ed38e9174950b600fb3f5a9ad65a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e63c47302d14d849b239a91580a25ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1004.551853] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925899, 'name': CreateVM_Task, 'duration_secs': 0.613528} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.554720] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1004.555793] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.555942] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.556210] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1004.556740] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee5e75de-56e2-4b84-846b-571d9df3afcb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.561799] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 
tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1004.561799] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e51734-0821-0536-8975-d1480b24f8f1" [ 1004.561799] env[69994]: _type = "Task" [ 1004.561799] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.572654] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e51734-0821-0536-8975-d1480b24f8f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.616148] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925900, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.798064] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a155871b-9c53-427d-b600-bb35e8855b18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.807029] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68562cb8-e914-40e2-b3b6-943decd69dc1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.839749] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f99fd8c-e10c-48b5-a5a9-df7304123ac4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.847820] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef48108-feb0-4d33-aa1a-86f63af55559 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.863903] env[69994]: DEBUG nova.compute.provider_tree [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.882188] env[69994]: DEBUG nova.network.neutron [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Successfully created port: 0c8c3a9b-a328-44f8-81e2-5a480901ac9f {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1005.008878] env[69994]: DEBUG nova.compute.manager [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1005.028694] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f751665d-b36c-4e38-9e29-9d255e32d616 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "5e142f6e-920a-4f11-abff-13eb5c168660" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.044s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.077103] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e51734-0821-0536-8975-d1480b24f8f1, 'name': SearchDatastore_Task, 'duration_secs': 0.023232} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.077443] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.077682] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1005.077951] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.078126] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.078343] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1005.078624] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c56ea1b7-ba4f-4490-b4db-ba1a46a9e839 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.094445] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1005.094445] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1005.094445] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a242a40c-e89f-4da5-8ee6-d6bb7257fe61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.099102] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1005.099102] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525cc55b-bd84-7dc4-9bdd-dc5e64889bb6" [ 1005.099102] env[69994]: _type = "Task" [ 1005.099102] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.107251] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525cc55b-bd84-7dc4-9bdd-dc5e64889bb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.114925] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925900, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.368304] env[69994]: DEBUG nova.scheduler.client.report [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1005.609847] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525cc55b-bd84-7dc4-9bdd-dc5e64889bb6, 'name': SearchDatastore_Task, 'duration_secs': 0.024974} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.613689] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2ad6e8f-7d71-4a7f-afbc-448d9daaa0ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.622495] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925900, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.623692] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1005.623692] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520fa928-841d-edb4-dd48-1960c0e4c6e5" [ 1005.623692] env[69994]: _type = "Task" [ 1005.623692] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.631792] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520fa928-841d-edb4-dd48-1960c0e4c6e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.706541] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.706775] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.873878] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.879s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.876260] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.477s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.877831] env[69994]: INFO 
nova.compute.claims [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1006.017074] env[69994]: DEBUG nova.compute.manager [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1006.042030] env[69994]: DEBUG nova.virt.hardware [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1006.042030] env[69994]: DEBUG nova.virt.hardware [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1006.042458] env[69994]: DEBUG nova.virt.hardware [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1006.042458] env[69994]: DEBUG nova.virt.hardware [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1006.042555] env[69994]: DEBUG nova.virt.hardware [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1006.042647] env[69994]: DEBUG nova.virt.hardware [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1006.042864] env[69994]: DEBUG nova.virt.hardware [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1006.043038] env[69994]: DEBUG nova.virt.hardware [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1006.043218] env[69994]: DEBUG nova.virt.hardware [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1006.043381] env[69994]: DEBUG nova.virt.hardware [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1006.043553] env[69994]: DEBUG nova.virt.hardware [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1006.045113] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4d0048-8cd9-41ce-bd7a-d840f43eb4fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.053980] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee772d4-4757-4d63-865d-b72aee4474db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.119395] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925900, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.906047} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.119661] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 29326ab7-2b4b-42af-a90c-e86510bcd443/29326ab7-2b4b-42af-a90c-e86510bcd443.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1006.119968] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.120252] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83296d2b-48f3-49f6-a0f6-540b971d13f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.128909] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Waiting for the task: (returnval){ [ 1006.128909] env[69994]: value = "task-2925901" [ 1006.128909] env[69994]: _type = "Task" [ 1006.128909] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.138505] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520fa928-841d-edb4-dd48-1960c0e4c6e5, 'name': SearchDatastore_Task, 'duration_secs': 0.014998} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.139183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.139497] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] f946992b-faf2-4580-adcd-806d3b8fd104/f946992b-faf2-4580-adcd-806d3b8fd104.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1006.140390] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99d618cd-4226-4c6c-b743-dcd1d47d25fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.145986] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925901, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.150298] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1006.150298] env[69994]: value = "task-2925902" [ 1006.150298] env[69994]: _type = "Task" [ 1006.150298] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.158497] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925902, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.209016] env[69994]: DEBUG nova.compute.manager [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1006.326354] env[69994]: DEBUG nova.compute.manager [req-f6d1c22e-669d-4567-9eb2-8003ff740ded req-52a65f83-9fa5-480f-be49-4200d05564ab service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Received event network-vif-plugged-0c8c3a9b-a328-44f8-81e2-5a480901ac9f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1006.326587] env[69994]: DEBUG oslo_concurrency.lockutils [req-f6d1c22e-669d-4567-9eb2-8003ff740ded req-52a65f83-9fa5-480f-be49-4200d05564ab service nova] Acquiring lock "68eba44a-0989-47dc-a88b-102d9aa34c5d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.326802] env[69994]: DEBUG oslo_concurrency.lockutils [req-f6d1c22e-669d-4567-9eb2-8003ff740ded req-52a65f83-9fa5-480f-be49-4200d05564ab service nova] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.327020] env[69994]: DEBUG oslo_concurrency.lockutils [req-f6d1c22e-669d-4567-9eb2-8003ff740ded req-52a65f83-9fa5-480f-be49-4200d05564ab service nova] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.327221] env[69994]: DEBUG nova.compute.manager [req-f6d1c22e-669d-4567-9eb2-8003ff740ded req-52a65f83-9fa5-480f-be49-4200d05564ab service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] No waiting events found dispatching network-vif-plugged-0c8c3a9b-a328-44f8-81e2-5a480901ac9f {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1006.327416] env[69994]: WARNING nova.compute.manager [req-f6d1c22e-669d-4567-9eb2-8003ff740ded req-52a65f83-9fa5-480f-be49-4200d05564ab service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Received unexpected event network-vif-plugged-0c8c3a9b-a328-44f8-81e2-5a480901ac9f for instance with vm_state building and task_state spawning. [ 1006.395472] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f969a07-9d0f-4d55-a98e-11822a6e93c9 tempest-ServerActionsV293TestJSON-303158452 tempest-ServerActionsV293TestJSON-303158452-project-member] Lock "4cc99b2f-2d75-4a98-ac02-6b609e0c31d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.390s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.420544] env[69994]: DEBUG nova.network.neutron [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Successfully updated port: 0c8c3a9b-a328-44f8-81e2-5a480901ac9f {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1006.640355] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925901, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084176} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.640659] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1006.641515] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e714e6e-772a-42ff-8da2-67292da82e1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.668751] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 29326ab7-2b4b-42af-a90c-e86510bcd443/29326ab7-2b4b-42af-a90c-e86510bcd443.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1006.671915] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0e3650d-046a-47a4-bf1d-a36369dd9dd7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.692595] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925902, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.694791] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Waiting for the task: (returnval){ [ 1006.694791] env[69994]: value = "task-2925903" [ 1006.694791] env[69994]: _type = "Task" [ 1006.694791] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.709606] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925903, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.735443] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.922798] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.923152] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquired lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1006.923201] env[69994]: DEBUG nova.network.neutron [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1007.126280] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc47e9be-f813-479a-aaf1-421a3a3a8516 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.133788] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba054ad-d0f5-4f90-abea-1a09beb8d523 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.166963] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee333c7-9645-44a6-b5ef-6aae407d73ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.174457] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925902, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608563} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.176871] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] f946992b-faf2-4580-adcd-806d3b8fd104/f946992b-faf2-4580-adcd-806d3b8fd104.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1007.177023] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1007.177368] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a83e6bf3-78d4-4e71-a672-171f6fa65fa4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.180355] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a554148-1c8f-421a-aa56-8b44c83a4655 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.193992] env[69994]: DEBUG nova.compute.provider_tree [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.196753] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1007.196753] env[69994]: value = "task-2925904" [ 1007.196753] env[69994]: _type = "Task" [ 1007.196753] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.208368] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925904, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.212356] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925903, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.455662] env[69994]: DEBUG nova.network.neutron [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1007.619574] env[69994]: DEBUG nova.network.neutron [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Updating instance_info_cache with network_info: [{"id": "0c8c3a9b-a328-44f8-81e2-5a480901ac9f", "address": "fa:16:3e:93:51:57", "network": {"id": "75f691f8-2853-4a39-bfdb-081341871a53", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1828741811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e63c47302d14d849b239a91580a25ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c8c3a9b-a3", "ovs_interfaceid": "0c8c3a9b-a328-44f8-81e2-5a480901ac9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.703683] env[69994]: DEBUG nova.scheduler.client.report [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1007.714832] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925903, 'name': ReconfigVM_Task, 'duration_secs': 0.626135} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.717737] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 29326ab7-2b4b-42af-a90c-e86510bcd443/29326ab7-2b4b-42af-a90c-e86510bcd443.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1007.718425] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925904, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096941} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.719174] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-798be346-f0c8-4641-ab3a-d66ba10a97c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.720800] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1007.721774] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21c3f82-e4c2-4650-af0a-bff5cce5e78d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.744399] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] f946992b-faf2-4580-adcd-806d3b8fd104/f946992b-faf2-4580-adcd-806d3b8fd104.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1007.746046] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8e0a97f-45b4-4c6c-9b1c-358397a40d3f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.760045] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Waiting for the task: (returnval){ [ 1007.760045] env[69994]: value = "task-2925905" [ 1007.760045] env[69994]: _type = "Task" [ 1007.760045] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.768698] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925905, 'name': Rename_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.793834] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1007.793834] env[69994]: value = "task-2925906" [ 1007.793834] env[69994]: _type = "Task" [ 1007.793834] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.803168] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925906, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.122728] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Releasing lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.123200] env[69994]: DEBUG nova.compute.manager [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Instance network_info: |[{"id": "0c8c3a9b-a328-44f8-81e2-5a480901ac9f", "address": "fa:16:3e:93:51:57", "network": {"id": "75f691f8-2853-4a39-bfdb-081341871a53", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1828741811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e63c47302d14d849b239a91580a25ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c8c3a9b-a3", "ovs_interfaceid": "0c8c3a9b-a328-44f8-81e2-5a480901ac9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1008.123661] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:51:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '25f42474-5594-4733-a681-6c69f4afb946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0c8c3a9b-a328-44f8-81e2-5a480901ac9f', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1008.132176] env[69994]: DEBUG nova.virt.vmwareapi.vm_util 
[None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Creating folder: Project (8e63c47302d14d849b239a91580a25ef). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1008.132526] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa4fe2b9-95f4-4bac-9897-df4c53d25a9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.143438] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Created folder: Project (8e63c47302d14d849b239a91580a25ef) in parent group-v587342. [ 1008.143700] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Creating folder: Instances. Parent ref: group-v587566. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1008.143980] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a1bd99c-d3f2-45f9-a7ea-95df82992bd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.153279] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Created folder: Instances in parent group-v587566. [ 1008.153549] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1008.153797] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1008.154057] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55249dd5-c7bf-4025-943e-f05f87ad26e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.173147] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1008.173147] env[69994]: value = "task-2925909" [ 1008.173147] env[69994]: _type = "Task" [ 1008.173147] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.181065] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925909, 'name': CreateVM_Task} progress is 0%. 
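The "Instance VIF info" entry above shows how a Neutron network_info record is reduced to the handful of fields the VMware backend cares about: the bridge name, the MAC address, the NSX logical-switch id wrapped in an OpaqueNetwork reference, the port UUID, and the vmxnet3 model. A rough sketch of that mapping follows, using only fields visible in the log entries; it is not the vmwareapi driver's code.

# Rough sketch of the network_info -> VIF-info shape seen in the log; illustrative only.
def vif_info_from_network_info(entry, vif_model="vmxnet3"):
    details = entry.get("details", {})
    return {
        "network_name": entry["network"]["bridge"],      # e.g. "br-int"
        "mac_address": entry["address"],                  # e.g. "fa:16:3e:93:51:57"
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details.get("nsx-logical-switch-id"),
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": entry["id"],                          # the Neutron port UUID
        "vif_model": vif_model,
    }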
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.211260] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.211815] env[69994]: DEBUG nova.compute.manager [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1008.214391] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.464s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.215744] env[69994]: INFO nova.compute.claims [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1008.271605] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925905, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.305027] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925906, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.352368] env[69994]: DEBUG nova.compute.manager [req-5a0f80d3-67b0-40b3-8a3c-09bd2546281d req-f21663a9-a7a3-4705-8b74-e53cb58f7703 service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Received event network-changed-0c8c3a9b-a328-44f8-81e2-5a480901ac9f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1008.352589] env[69994]: DEBUG nova.compute.manager [req-5a0f80d3-67b0-40b3-8a3c-09bd2546281d req-f21663a9-a7a3-4705-8b74-e53cb58f7703 service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Refreshing instance network info cache due to event network-changed-0c8c3a9b-a328-44f8-81e2-5a480901ac9f. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1008.352834] env[69994]: DEBUG oslo_concurrency.lockutils [req-5a0f80d3-67b0-40b3-8a3c-09bd2546281d req-f21663a9-a7a3-4705-8b74-e53cb58f7703 service nova] Acquiring lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.353022] env[69994]: DEBUG oslo_concurrency.lockutils [req-5a0f80d3-67b0-40b3-8a3c-09bd2546281d req-f21663a9-a7a3-4705-8b74-e53cb58f7703 service nova] Acquired lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.353220] env[69994]: DEBUG nova.network.neutron [req-5a0f80d3-67b0-40b3-8a3c-09bd2546281d req-f21663a9-a7a3-4705-8b74-e53cb58f7703 service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Refreshing network info cache for port 0c8c3a9b-a328-44f8-81e2-5a480901ac9f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1008.683921] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925909, 'name': CreateVM_Task, 'duration_secs': 0.412622} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.684161] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1008.685053] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.685277] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.685660] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1008.685992] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a98b1f9-0057-43d9-aedf-3d758a95e6b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.691198] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1008.691198] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52051e8c-026f-cf05-59ba-8c536079b9a7" [ 1008.691198] env[69994]: _type = "Task" [ 1008.691198] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.703240] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52051e8c-026f-cf05-59ba-8c536079b9a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.719954] env[69994]: DEBUG nova.compute.utils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1008.723528] env[69994]: DEBUG nova.compute.manager [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Not allocating networking since 'none' was specified. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1008.771128] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925905, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.804897] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925906, 'name': ReconfigVM_Task, 'duration_secs': 0.90135} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.805225] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Reconfigured VM instance instance-00000051 to attach disk [datastore2] f946992b-faf2-4580-adcd-806d3b8fd104/f946992b-faf2-4580-adcd-806d3b8fd104.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.805889] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a0e9976-a58b-483a-983d-e1571de93e68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.813565] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1008.813565] env[69994]: value = "task-2925910" [ 1008.813565] env[69994]: _type = "Task" [ 1008.813565] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.822433] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925910, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.109566] env[69994]: DEBUG nova.network.neutron [req-5a0f80d3-67b0-40b3-8a3c-09bd2546281d req-f21663a9-a7a3-4705-8b74-e53cb58f7703 service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Updated VIF entry in instance network info cache for port 0c8c3a9b-a328-44f8-81e2-5a480901ac9f. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1009.110025] env[69994]: DEBUG nova.network.neutron [req-5a0f80d3-67b0-40b3-8a3c-09bd2546281d req-f21663a9-a7a3-4705-8b74-e53cb58f7703 service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Updating instance_info_cache with network_info: [{"id": "0c8c3a9b-a328-44f8-81e2-5a480901ac9f", "address": "fa:16:3e:93:51:57", "network": {"id": "75f691f8-2853-4a39-bfdb-081341871a53", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1828741811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e63c47302d14d849b239a91580a25ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c8c3a9b-a3", "ovs_interfaceid": "0c8c3a9b-a328-44f8-81e2-5a480901ac9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.202490] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52051e8c-026f-cf05-59ba-8c536079b9a7, 'name': SearchDatastore_Task, 'duration_secs': 0.012238} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.202986] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.203372] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1009.203756] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.204027] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.204334] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1009.204721] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83aa1eb0-49bb-4541-9f25-4b2d9dc93ff4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.222658] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1009.222846] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Folder [datastore1] devstack-image-cache_base created. 
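The lock/SearchDatastore/MakeDirectory sequence above is the image-cache fast path: take the per-image lock on the cached VMDK, make sure the devstack-image-cache_base folder exists, confirm the cached disk is present, then clone it into the instance directory before releasing the lock. A stdlib-only sketch of that serialize-then-copy pattern is shown below; the filesystem calls merely stand in for the datastore tasks and none of it is the vmwareapi driver.

# Minimal sketch of "lock the cached image, create the folder if missing, then copy".
import os
import shutil
from collections import defaultdict
from contextlib import contextmanager
from threading import Lock

_locks = defaultdict(Lock)            # one lock per cached image path, as in the lock names above

@contextmanager
def image_cache_lock(cache_path):
    with _locks[cache_path]:
        yield

def ensure_cached_copy(cache_path, instance_disk_path):
    """Serialize access to the cached VMDK, then clone it for the new instance."""
    with image_cache_lock(cache_path):
        os.makedirs(os.path.dirname(cache_path) or ".", exist_ok=True)  # folder-if-missing
        if not os.path.exists(cache_path):
            raise FileNotFoundError(cache_path)   # a real driver would fetch the image here
        shutil.copyfile(cache_path, instance_disk_path)  # stands in for CopyVirtualDisk_Task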
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1009.223608] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef37e4ba-0303-489c-9f96-2e0f57b4457e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.226410] env[69994]: DEBUG nova.compute.manager [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1009.235262] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1009.235262] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523bc656-c268-8770-d174-7ace46d459b4" [ 1009.235262] env[69994]: _type = "Task" [ 1009.235262] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.244154] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523bc656-c268-8770-d174-7ace46d459b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.274665] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925905, 'name': Rename_Task, 'duration_secs': 1.142377} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.274943] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1009.275218] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eec6f815-b503-4846-a639-8a1f86fb8e6c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.282880] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Waiting for the task: (returnval){ [ 1009.282880] env[69994]: value = "task-2925911" [ 1009.282880] env[69994]: _type = "Task" [ 1009.282880] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.290310] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925911, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.326492] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925910, 'name': Rename_Task, 'duration_secs': 0.190428} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.326857] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1009.329367] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3b8ecdd-a7c2-470d-9745-4869066262c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.336213] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1009.336213] env[69994]: value = "task-2925912" [ 1009.336213] env[69994]: _type = "Task" [ 1009.336213] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.346405] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925912, 'name': PowerOnVM_Task} progress is 0%. 
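Most of the surrounding entries are variations of one polling pattern: submit a vCenter task, log "Waiting for the task", report progress percentages, and record duration_secs once it completes. A generic loop in that spirit follows; it is an illustration only, not oslo.vmware's wait_for_task.

# Generic task-polling loop, illustrative only. poll_fn() returns (state, progress).
import time

def wait_for_task(poll_fn, interval=0.5, timeout=300):
    """Poll until the task succeeds, fails, or times out; return elapsed seconds."""
    start = time.monotonic()
    while True:
        state, progress = poll_fn()
        if state == "success":
            return time.monotonic() - start       # analogous to duration_secs in the log
        if state == "error":
            raise RuntimeError("task failed")
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in time")
        print(f"progress is {progress}%")
        time.sleep(interval)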
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.497489] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9f0f7d-fbe5-4872-bdec-709760f290c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.504251] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930aa2b3-57ec-4b73-aeed-52dd75efb620 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.536926] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6116fa-8518-43bc-8c94-96e832104c4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.544642] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93856527-efdc-41ef-99fc-91059131e1ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.558235] env[69994]: DEBUG nova.compute.provider_tree [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.613058] env[69994]: DEBUG oslo_concurrency.lockutils [req-5a0f80d3-67b0-40b3-8a3c-09bd2546281d req-f21663a9-a7a3-4705-8b74-e53cb58f7703 service nova] Releasing lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.747151] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523bc656-c268-8770-d174-7ace46d459b4, 'name': SearchDatastore_Task, 'duration_secs': 0.022647} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.748083] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fe7b42d-cb1d-443c-8de1-be31c25669e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.753467] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1009.753467] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d58a9e-5ee9-c021-9083-901bf93c88a0" [ 1009.753467] env[69994]: _type = "Task" [ 1009.753467] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.761552] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d58a9e-5ee9-c021-9083-901bf93c88a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.792354] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925911, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.846401] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925912, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.061252] env[69994]: DEBUG nova.scheduler.client.report [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1010.242614] env[69994]: DEBUG nova.compute.manager [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1010.266285] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d58a9e-5ee9-c021-9083-901bf93c88a0, 'name': SearchDatastore_Task, 'duration_secs': 0.016221} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.268518] env[69994]: DEBUG nova.virt.hardware [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1010.268769] env[69994]: DEBUG nova.virt.hardware [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1010.268926] env[69994]: DEBUG nova.virt.hardware [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1010.269121] env[69994]: DEBUG nova.virt.hardware [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1010.269270] env[69994]: DEBUG nova.virt.hardware [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1010.269416] env[69994]: DEBUG nova.virt.hardware [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1010.269622] env[69994]: DEBUG nova.virt.hardware [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1010.269781] env[69994]: DEBUG nova.virt.hardware [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1010.270029] env[69994]: DEBUG 
nova.virt.hardware [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1010.270220] env[69994]: DEBUG nova.virt.hardware [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1010.270409] env[69994]: DEBUG nova.virt.hardware [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1010.270705] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.270970] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 68eba44a-0989-47dc-a88b-102d9aa34c5d/68eba44a-0989-47dc-a88b-102d9aa34c5d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1010.271768] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85a4497-a4a7-42fd-97f4-72baf6e1fa1b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.274379] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8461d96-8042-4243-a183-df21707a7522 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.282309] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a874df1-4b74-4ab3-b85c-585147423e48 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.287252] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1010.287252] env[69994]: value = "task-2925913" [ 1010.287252] env[69994]: _type = "Task" [ 1010.287252] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.301998] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1010.308027] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Creating folder: Project (7c78abe397ac44f6930291ed079de986). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1010.311990] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a44ee43-6838-418d-ac02-e65a5f67c63d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.313857] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925911, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.317138] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925913, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.326114] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Created folder: Project (7c78abe397ac44f6930291ed079de986) in parent group-v587342. [ 1010.326372] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Creating folder: Instances. Parent ref: group-v587569. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1010.326638] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfb04c7a-44ac-4efa-bb32-131c3acc87af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.336775] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Created folder: Instances in parent group-v587569. [ 1010.337068] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1010.337357] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1010.341099] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f2ec83b-5e48-4bb0-8017-8dfe1e9dab5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.358328] env[69994]: DEBUG oslo_vmware.api [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925912, 'name': PowerOnVM_Task, 'duration_secs': 0.606031} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.359553] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1010.359768] env[69994]: INFO nova.compute.manager [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Took 9.05 seconds to spawn the instance on the hypervisor. [ 1010.359983] env[69994]: DEBUG nova.compute.manager [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1010.360255] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1010.360255] env[69994]: value = "task-2925916" [ 1010.360255] env[69994]: _type = "Task" [ 1010.360255] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.361033] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f175f3-57f2-43ea-a955-d691ba333810 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.375502] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925916, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.568512] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.354s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.569035] env[69994]: DEBUG nova.compute.manager [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1010.572361] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.598s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.805250] env[69994]: DEBUG oslo_vmware.api [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925911, 'name': PowerOnVM_Task, 'duration_secs': 1.105862} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.809903] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1010.810257] env[69994]: INFO nova.compute.manager [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Took 11.89 seconds to spawn the instance on the hypervisor. [ 1010.810541] env[69994]: DEBUG nova.compute.manager [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1010.810932] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925913, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.811945] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ca30f6-e2f1-408d-83ff-46ee9ca5b966 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.874021] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925916, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.882721] env[69994]: INFO nova.compute.manager [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Took 20.18 seconds to build instance. 
[ 1011.076026] env[69994]: DEBUG nova.compute.utils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1011.085459] env[69994]: DEBUG nova.compute.manager [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1011.085658] env[69994]: DEBUG nova.network.neutron [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1011.149397] env[69994]: DEBUG nova.policy [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d6a1603506e4d48a9d2f8bf61475821', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f602778aac0d41c49e73c2450f31d711', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1011.186388] env[69994]: DEBUG oslo_vmware.rw_handles [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c6895-5d36-9ab3-9933-a16836c1557f/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1011.187594] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac552345-f30d-4801-b0f8-fa863807fc99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.194036] env[69994]: DEBUG oslo_vmware.rw_handles [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c6895-5d36-9ab3-9933-a16836c1557f/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1011.194180] env[69994]: ERROR oslo_vmware.rw_handles [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c6895-5d36-9ab3-9933-a16836c1557f/disk-0.vmdk due to incomplete transfer. 
[ 1011.194432] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-21eefe39-5cf2-4970-b80d-cb45ed3de1a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.201724] env[69994]: DEBUG oslo_vmware.rw_handles [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528c6895-5d36-9ab3-9933-a16836c1557f/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1011.201932] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Uploaded image ee1b98b0-022f-4790-960d-699d8d8d274a to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1011.204577] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1011.204854] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-71ac2645-5d41-4fa2-a18f-e2fd9cabcba9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.211686] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 1011.211686] env[69994]: value = "task-2925917" [ 1011.211686] env[69994]: _type = "Task" [ 1011.211686] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.219777] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925917, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.303076] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925913, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551313} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.304391] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 68eba44a-0989-47dc-a88b-102d9aa34c5d/68eba44a-0989-47dc-a88b-102d9aa34c5d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1011.304620] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1011.307024] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0fb5132-b125-459f-9517-8c5e1b60f214 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.313864] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1011.313864] env[69994]: value = "task-2925918" [ 1011.313864] env[69994]: _type = "Task" [ 1011.313864] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.321578] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925918, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.330978] env[69994]: INFO nova.compute.manager [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Took 25.57 seconds to build instance. [ 1011.375623] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925916, 'name': CreateVM_Task, 'duration_secs': 0.526446} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.375808] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1011.376659] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.376772] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.377092] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1011.377385] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-821da910-d614-4ef3-8bd2-bd62751597b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.385028] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1011.385028] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5298e359-75aa-97e4-0b13-851b7f855b53" [ 1011.385028] env[69994]: _type = "Task" [ 1011.385028] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.387579] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5ca4e2f-fea1-4164-bbf9-bed7dab6290e tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "f946992b-faf2-4580-adcd-806d3b8fd104" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.699s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.393074] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5298e359-75aa-97e4-0b13-851b7f855b53, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.473481] env[69994]: DEBUG nova.network.neutron [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Successfully created port: ad28c14f-638f-4073-b494-cb6a2a579dab {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1011.586328] env[69994]: DEBUG nova.compute.manager [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1011.616371] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance ab320e59-febb-4f8f-9bc4-74227d29c752 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.616518] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 5acdf02b-f61c-46ff-9c36-8e86b9be7738 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.616657] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 06fa5ab5-baab-466e-8574-5391247c13a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.616769] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance d5af7ae1-d68e-4170-b762-e56d7f2551d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.616878] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance f0b77732-aae1-4790-a2c7-75586e78eda6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.616987] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 8001cb13-6a52-451b-b4b6-57b893975079 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.617114] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 660277f8-a7ff-43a9-8068-15e3db5a1069 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.617224] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance e1c00159-d198-4858-b5a3-aa05152b1fda actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.617336] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 76dbf172-10b2-4439-9d2a-8226ba46062d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.617449] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance c98308b3-2431-4f17-9022-bcd9f1e83a35 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.617709] env[69994]: WARNING nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance d1875a97-9eba-47be-a76d-6088cb13412b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1011.617709] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 5e142f6e-920a-4f11-abff-13eb5c168660 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.617919] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 29326ab7-2b4b-42af-a90c-e86510bcd443 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.618008] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance f946992b-faf2-4580-adcd-806d3b8fd104 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.618067] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 68eba44a-0989-47dc-a88b-102d9aa34c5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.618166] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 1ff25686-e13e-4003-909b-18bf919aa20c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.618269] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 5b9648a7-f26f-4151-be5c-59991035a529 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.721641] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925917, 'name': Destroy_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.810949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Acquiring lock "29326ab7-2b4b-42af-a90c-e86510bcd443" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.823974] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925918, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.227454} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.825046] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1011.825271] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05af963a-a009-4bc0-a199-6d49f956a316 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.838950] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be6c907a-fe6a-4bec-8783-8f1c6e309077 tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Lock "29326ab7-2b4b-42af-a90c-e86510bcd443" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.095s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.847816] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 68eba44a-0989-47dc-a88b-102d9aa34c5d/68eba44a-0989-47dc-a88b-102d9aa34c5d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1011.848165] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Lock "29326ab7-2b4b-42af-a90c-e86510bcd443" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.037s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.848408] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Acquiring lock "29326ab7-2b4b-42af-a90c-e86510bcd443-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.848623] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Lock "29326ab7-2b4b-42af-a90c-e86510bcd443-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.848787] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Lock "29326ab7-2b4b-42af-a90c-e86510bcd443-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.850339] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a20beb5-7b12-44aa-9772-90fa3b78e511 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.864657] env[69994]: INFO nova.compute.manager [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Terminating instance [ 1011.870846] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1011.870846] env[69994]: value = "task-2925919" [ 1011.870846] env[69994]: _type = "Task" [ 1011.870846] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.881221] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925919, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.891601] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5298e359-75aa-97e4-0b13-851b7f855b53, 'name': SearchDatastore_Task, 'duration_secs': 0.042973} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.891864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1011.892117] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1011.892349] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.892526] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.892722] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1011.892963] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e6979e6-0a32-41c6-8042-6c4150552e94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.902959] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1011.903169] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1011.903894] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-236b4f3b-d853-4e72-a0fe-e6c74764341a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.908484] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1011.908484] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52834ea6-42a5-bb40-2100-ddc6a13ef6cc" [ 1011.908484] env[69994]: _type = "Task" [ 1011.908484] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.916163] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52834ea6-42a5-bb40-2100-ddc6a13ef6cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.121295] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 686feb53-00e2-43d9-b316-09c089df0891 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.222049] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925917, 'name': Destroy_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.399072] env[69994]: DEBUG nova.compute.manager [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1012.399072] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1012.399072] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e46113d-a3e3-4647-a4ef-eceed00dd1f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.399072] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925919, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.399072] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.399072] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3aa6f997-0284-44cb-8c19-f55f7de9f33d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.399072] env[69994]: DEBUG oslo_vmware.api [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Waiting for the task: (returnval){ [ 1012.399072] env[69994]: value = "task-2925920" [ 1012.399072] env[69994]: _type = "Task" [ 1012.399072] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.406911] env[69994]: DEBUG oslo_vmware.api [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925920, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.418262] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52834ea6-42a5-bb40-2100-ddc6a13ef6cc, 'name': SearchDatastore_Task, 'duration_secs': 0.044559} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.419073] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d72930d2-d8ab-4cf8-8647-fdc6b4be5cf9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.424149] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1012.424149] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529ca3f2-3554-2224-8a36-96a4db67a93a" [ 1012.424149] env[69994]: _type = "Task" [ 1012.424149] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.431807] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529ca3f2-3554-2224-8a36-96a4db67a93a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.484416] env[69994]: DEBUG oslo_concurrency.lockutils [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "f946992b-faf2-4580-adcd-806d3b8fd104" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.484695] env[69994]: DEBUG oslo_concurrency.lockutils [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "f946992b-faf2-4580-adcd-806d3b8fd104" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.596489] env[69994]: DEBUG nova.compute.manager [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1012.623309] env[69994]: DEBUG nova.virt.hardware [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1012.623576] env[69994]: DEBUG nova.virt.hardware [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1012.623735] env[69994]: DEBUG nova.virt.hardware [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1012.623945] env[69994]: DEBUG nova.virt.hardware [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1012.624126] env[69994]: DEBUG nova.virt.hardware [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 
tempest-ServerDiskConfigTestJSON-696973435-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1012.624276] env[69994]: DEBUG nova.virt.hardware [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1012.624484] env[69994]: DEBUG nova.virt.hardware [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1012.624649] env[69994]: DEBUG nova.virt.hardware [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1012.624815] env[69994]: DEBUG nova.virt.hardware [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1012.624979] env[69994]: DEBUG nova.virt.hardware [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1012.625180] env[69994]: DEBUG nova.virt.hardware [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1012.625860] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.627795] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b519328a-2452-4743-86c8-fcb4a42c1e6c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.636237] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875d8315-7a70-408c-879c-e61141bd16f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.724058] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925917, 'name': Destroy_Task, 'duration_secs': 1.061471} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.724398] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Destroyed the VM [ 1012.724647] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1012.724922] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d98f2df4-dabf-4dd1-822a-8a68409c3b83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.730978] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 1012.730978] env[69994]: value = "task-2925921" [ 1012.730978] env[69994]: _type = "Task" [ 1012.730978] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.738814] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925921, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.890258] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925919, 'name': ReconfigVM_Task, 'duration_secs': 0.989658} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.890611] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 68eba44a-0989-47dc-a88b-102d9aa34c5d/68eba44a-0989-47dc-a88b-102d9aa34c5d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1012.891357] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b4f7c5c-cc1c-411a-9445-d27d458de769 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.897593] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1012.897593] env[69994]: value = "task-2925922" [ 1012.897593] env[69994]: _type = "Task" [ 1012.897593] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.909435] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925922, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.912700] env[69994]: DEBUG oslo_vmware.api [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925920, 'name': PowerOffVM_Task, 'duration_secs': 0.198229} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.912833] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.913066] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1012.913286] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e88b315-e595-4aef-b58b-c606298264fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.934485] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529ca3f2-3554-2224-8a36-96a4db67a93a, 'name': SearchDatastore_Task, 'duration_secs': 0.012864} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.934739] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.934996] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1ff25686-e13e-4003-909b-18bf919aa20c/1ff25686-e13e-4003-909b-18bf919aa20c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1012.935273] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f639c30-9130-4902-8321-f067ebc03806 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.945295] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1012.945295] env[69994]: value = "task-2925924" [ 1012.945295] env[69994]: _type = "Task" [ 1012.945295] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.953725] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925924, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.957537] env[69994]: DEBUG nova.compute.manager [req-47d1c51b-8fcf-403c-ab43-6f7d51b7d496 req-7e87ac5f-95b2-4317-bc75-8c0bc45dd70f service nova] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Received event network-vif-plugged-ad28c14f-638f-4073-b494-cb6a2a579dab {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1012.957818] env[69994]: DEBUG oslo_concurrency.lockutils [req-47d1c51b-8fcf-403c-ab43-6f7d51b7d496 req-7e87ac5f-95b2-4317-bc75-8c0bc45dd70f service nova] Acquiring lock "5b9648a7-f26f-4151-be5c-59991035a529-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.958031] env[69994]: DEBUG oslo_concurrency.lockutils [req-47d1c51b-8fcf-403c-ab43-6f7d51b7d496 req-7e87ac5f-95b2-4317-bc75-8c0bc45dd70f service nova] Lock "5b9648a7-f26f-4151-be5c-59991035a529-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.958221] env[69994]: DEBUG oslo_concurrency.lockutils [req-47d1c51b-8fcf-403c-ab43-6f7d51b7d496 req-7e87ac5f-95b2-4317-bc75-8c0bc45dd70f service nova] Lock "5b9648a7-f26f-4151-be5c-59991035a529-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.958393] env[69994]: DEBUG nova.compute.manager [req-47d1c51b-8fcf-403c-ab43-6f7d51b7d496 req-7e87ac5f-95b2-4317-bc75-8c0bc45dd70f service nova] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] No waiting events found dispatching network-vif-plugged-ad28c14f-638f-4073-b494-cb6a2a579dab {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1012.958554] env[69994]: WARNING nova.compute.manager [req-47d1c51b-8fcf-403c-ab43-6f7d51b7d496 req-7e87ac5f-95b2-4317-bc75-8c0bc45dd70f service nova] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Received unexpected event network-vif-plugged-ad28c14f-638f-4073-b494-cb6a2a579dab for instance with vm_state building and task_state spawning. 
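The network-vif-plugged entries just above trace Nova's external-event plumbing: Neutron reports the port as plugged, the compute manager takes the per-instance "-events" lock, tries to pop a registered waiter for that event, finds none ("No waiting events found dispatching"), and logs the event as unexpected because the spawning thread has not started waiting for it yet. Below is a minimal, self-contained sketch of that wait/dispatch pattern; the names echo the log, but the implementation is illustrative only and is not Nova's actual code.

# Illustrative sketch (not Nova's code) of the register/pop pattern behind the
# "No waiting events found dispatching ..." and "Received unexpected event ..."
# lines above.
import threading
from collections import defaultdict

class InstanceEvents:
    """Maps instance_uuid -> {event_name: threading.Event}."""
    def __init__(self):
        self._lock = threading.Lock()          # stands in for the "<uuid>-events" lock
        self._events = defaultdict(dict)

    def prepare_for_event(self, instance_uuid, event_name):
        # Called by the spawn path before it starts waiting for the VIF plug.
        with self._lock:
            ev = threading.Event()
            self._events[instance_uuid][event_name] = ev
            return ev

    def pop_event(self, instance_uuid, event_name):
        with self._lock:
            return self._events[instance_uuid].pop(event_name, None)

events = InstanceEvents()

def handle_external_event(instance_uuid, event_name):
    waiter = events.pop_event(instance_uuid, event_name)
    if waiter is None:
        # No thread registered for this event yet: report it and drop it,
        # as the WARNING entry in the log does.
        print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
    else:
        waiter.set()

# Usage: the event arrives before the spawn path registered a waiter, so it is
# reported as unexpected -- the same race the log records while the instance is
# still in vm_state building / task_state spawning.
handle_external_event("5b9648a7", "network-vif-plugged-ad28c14f")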
[ 1012.977403] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1012.977634] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1012.977818] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Deleting the datastore file [datastore2] 29326ab7-2b4b-42af-a90c-e86510bcd443 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1012.978131] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a0b7d9c-e789-466e-bb5b-11f312c8b7b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.984403] env[69994]: DEBUG oslo_vmware.api [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Waiting for the task: (returnval){ [ 1012.984403] env[69994]: value = "task-2925925" [ 1012.984403] env[69994]: _type = "Task" [ 1012.984403] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.988597] env[69994]: DEBUG nova.compute.utils [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1012.994767] env[69994]: DEBUG oslo_vmware.api [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925925, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.019441] env[69994]: DEBUG nova.network.neutron [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Successfully updated port: ad28c14f-638f-4073-b494-cb6a2a579dab {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1013.132152] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1013.132508] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1013.132694] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3584MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1013.242875] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925921, 'name': RemoveSnapshot_Task} progress is 30%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.413090] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925922, 'name': Rename_Task, 'duration_secs': 0.188958} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.413601] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1013.413686] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42df2885-6f5a-4f44-b2e4-dcc85fff7080 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.427065] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1013.427065] env[69994]: value = "task-2925926" [ 1013.427065] env[69994]: _type = "Task" [ 1013.427065] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.440417] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925926, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.457959] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925924, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.491669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "f946992b-faf2-4580-adcd-806d3b8fd104" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.495787] env[69994]: DEBUG oslo_vmware.api [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Task: {'id': task-2925925, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.272894} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.499477] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1013.499652] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1013.499785] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1013.500796] env[69994]: INFO nova.compute.manager [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1013.500796] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1013.501108] env[69994]: DEBUG nova.compute.manager [-] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1013.501230] env[69994]: DEBUG nova.network.neutron [-] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1013.521983] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.522220] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.522379] env[69994]: DEBUG nova.network.neutron [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1013.525200] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6edc04-aa4f-4710-bb79-875a632150fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.533108] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708ad5aa-2b80-4af9-994d-7b9f233b6be2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.567647] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa26c97-e451-48ea-9c8f-53b318402bcc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.575108] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6353291-123f-4df6-a46a-f01e84bc9cf6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.588842] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1013.741592] env[69994]: DEBUG oslo_vmware.api [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 
tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925921, 'name': RemoveSnapshot_Task, 'duration_secs': 0.817355} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.741859] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1013.742100] env[69994]: INFO nova.compute.manager [None req-d763f0bd-d830-4bd6-89ac-5729915c8439 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Took 17.77 seconds to snapshot the instance on the hypervisor. [ 1013.937935] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925926, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.954422] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550825} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.954699] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1ff25686-e13e-4003-909b-18bf919aa20c/1ff25686-e13e-4003-909b-18bf919aa20c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.954918] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.955200] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2af88d39-3681-48d0-8d1d-2edc7477f2ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.961584] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1013.961584] env[69994]: value = "task-2925927" [ 1013.961584] env[69994]: _type = "Task" [ 1013.961584] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.968996] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925927, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.060324] env[69994]: DEBUG nova.network.neutron [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1014.111557] env[69994]: ERROR nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [req-97b5707a-f6ee-4c8e-bb4e-ad018c24ca79] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-97b5707a-f6ee-4c8e-bb4e-ad018c24ca79"}]} [ 1014.132935] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1014.155056] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1014.155246] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1014.167476] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Refreshing aggregate associations for resource provider 
2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1014.186313] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1014.221435] env[69994]: DEBUG nova.network.neutron [-] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.239759] env[69994]: DEBUG nova.network.neutron [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating instance_info_cache with network_info: [{"id": "ad28c14f-638f-4073-b494-cb6a2a579dab", "address": "fa:16:3e:60:ef:9a", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad28c14f-63", "ovs_interfaceid": "ad28c14f-638f-4073-b494-cb6a2a579dab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.441455] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925926, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.455087] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b33a215-f932-4ce0-a014-15ffb6c43df8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.465210] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037ccff3-2e10-4db7-a784-1d2f4154e9f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.472630] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06655} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.499285] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1014.500660] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08976071-e343-4ce2-89c4-fcebeabcf1f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.503832] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c99d6f1-adc5-408c-893a-573e3bf420c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.524017] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 1ff25686-e13e-4003-909b-18bf919aa20c/1ff25686-e13e-4003-909b-18bf919aa20c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1014.526264] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63e07267-7cb4-4138-b8dc-d4b77e78fcf5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.541285] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c533793-eb40-49db-ac2a-cd8c40705b8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.554155] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1014.556543] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1014.556543] env[69994]: value = "task-2925928" [ 1014.556543] env[69994]: _type = "Task" [ 1014.556543] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.564016] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925928, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.569987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "f946992b-faf2-4580-adcd-806d3b8fd104" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.570295] env[69994]: DEBUG oslo_concurrency.lockutils [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "f946992b-faf2-4580-adcd-806d3b8fd104" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.570496] env[69994]: INFO nova.compute.manager [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Attaching volume dcf0e539-f80c-4035-8888-0a3f0ceb66f0 to /dev/sdb [ 1014.607157] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d145c3-6155-47f2-ba40-60c183885c0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.615033] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549532e8-f2d4-4a30-9cf8-0d4bd7009b13 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.627862] env[69994]: DEBUG nova.virt.block_device [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Updating existing volume attachment record: 846adc53-9d5b-48c7-9170-8d6496c617d9 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1014.726938] env[69994]: INFO nova.compute.manager [-] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Took 1.22 seconds to deallocate network for instance. 
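The ERROR at 1014.111557 (409 with code placement.concurrent_update), the inventory refresh that follows it, and the successful write logged further down (provider generation moving from 118 to 119) show Placement's optimistic concurrency: every inventory PUT carries the provider generation the writer last saw, a stale generation is rejected, and the writer must re-read and retry. The sketch below illustrates that retry loop; the URL, token, and microversion are placeholders, and the endpoint and payload shapes follow the Placement API as understood here, not Nova's own report client.

# Sketch of the refresh-and-retry pattern behind the 409
# "placement.concurrent_update" entries. PLACEMENT_URL and the token are
# placeholders; field names are assumptions based on the Placement API.
import requests

PLACEMENT_URL = "http://placement.example/placement"   # placeholder
HEADERS = {
    "X-Auth-Token": "<token>",                          # placeholder
    "OpenStack-API-Version": "placement 1.26",          # assumed microversion
}

def get_provider_generation(rp_uuid):
    r = requests.get(f"{PLACEMENT_URL}/resource_providers/{rp_uuid}",
                     headers=HEADERS)
    r.raise_for_status()
    return r.json()["generation"]

def set_inventory(rp_uuid, inventories, retries=3):
    """PUT the full inventory, retrying when another writer has already
    bumped the provider generation (the 409 placement.concurrent_update case)."""
    for _ in range(retries):
        gen = get_provider_generation(rp_uuid)
        body = {"resource_provider_generation": gen, "inventories": inventories}
        r = requests.put(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
            headers=HEADERS, json=body)
        if r.status_code == 200:
            return r.json()["resource_provider_generation"]   # new generation
        if r.status_code == 409 and "placement.concurrent_update" in r.text:
            continue   # someone else updated the provider; refresh and retry
        r.raise_for_status()
    raise RuntimeError("gave up after repeated generation conflicts")

In the log the conflict is resolved the same way: the inventories are refreshed, the write is retried with the current generation, and the provider generation advances by one once it succeeds.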
[ 1014.743747] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.744008] env[69994]: DEBUG nova.compute.manager [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Instance network_info: |[{"id": "ad28c14f-638f-4073-b494-cb6a2a579dab", "address": "fa:16:3e:60:ef:9a", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad28c14f-63", "ovs_interfaceid": "ad28c14f-638f-4073-b494-cb6a2a579dab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1014.744456] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:ef:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad28c14f-638f-4073-b494-cb6a2a579dab', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1014.752413] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1014.753035] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1014.754247] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64aa6e95-94bc-429e-9aef-77754f83737a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.774259] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1014.774259] env[69994]: value = "task-2925930" [ 1014.774259] env[69994]: _type = "Task" [ 1014.774259] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.785381] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925930, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.941134] env[69994]: DEBUG oslo_vmware.api [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2925926, 'name': PowerOnVM_Task, 'duration_secs': 1.333496} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.941480] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1014.941716] env[69994]: INFO nova.compute.manager [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Took 8.92 seconds to spawn the instance on the hypervisor. [ 1014.942059] env[69994]: DEBUG nova.compute.manager [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1014.942823] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6b97c6-25c7-4cc0-b30a-8e78ec42f946 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.990082] env[69994]: DEBUG nova.compute.manager [req-fdf812a8-8c62-43b1-90af-9b6df42bda61 req-c25000c9-a9c4-480e-b4c7-caff7ec4de71 service nova] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Received event network-changed-ad28c14f-638f-4073-b494-cb6a2a579dab {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1014.990376] env[69994]: DEBUG nova.compute.manager [req-fdf812a8-8c62-43b1-90af-9b6df42bda61 req-c25000c9-a9c4-480e-b4c7-caff7ec4de71 service nova] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Refreshing instance network info cache due to event network-changed-ad28c14f-638f-4073-b494-cb6a2a579dab. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1014.990630] env[69994]: DEBUG oslo_concurrency.lockutils [req-fdf812a8-8c62-43b1-90af-9b6df42bda61 req-c25000c9-a9c4-480e-b4c7-caff7ec4de71 service nova] Acquiring lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.990815] env[69994]: DEBUG oslo_concurrency.lockutils [req-fdf812a8-8c62-43b1-90af-9b6df42bda61 req-c25000c9-a9c4-480e-b4c7-caff7ec4de71 service nova] Acquired lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.991026] env[69994]: DEBUG nova.network.neutron [req-fdf812a8-8c62-43b1-90af-9b6df42bda61 req-c25000c9-a9c4-480e-b4c7-caff7ec4de71 service nova] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Refreshing network info cache for port ad28c14f-638f-4073-b494-cb6a2a579dab {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1015.072158] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925928, 'name': ReconfigVM_Task, 'duration_secs': 0.308127} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.072523] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 1ff25686-e13e-4003-909b-18bf919aa20c/1ff25686-e13e-4003-909b-18bf919aa20c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1015.073182] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4dbdb382-83f0-461e-8b82-5ad27706a62b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.080067] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1015.080067] env[69994]: value = "task-2925933" [ 1015.080067] env[69994]: _type = "Task" [ 1015.080067] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.088274] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925933, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.093570] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 118 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1015.093786] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 118 to 119 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1015.093936] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1015.232570] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.284248] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925930, 'name': CreateVM_Task, 'duration_secs': 0.348281} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.284248] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1015.284908] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.285093] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.285413] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1015.285674] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0841ef2c-dd0e-49c3-a07a-78d80543c8ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.290332] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1015.290332] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52031c82-960a-3922-3dac-db0ebf489ad1" [ 1015.290332] env[69994]: _type = "Task" [ 1015.290332] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.297953] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52031c82-960a-3922-3dac-db0ebf489ad1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.334154] env[69994]: DEBUG nova.compute.manager [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1015.335168] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a7f9fd-934e-4b32-ab7b-3055bbed5db3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.464195] env[69994]: INFO nova.compute.manager [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Took 20.64 seconds to build instance. [ 1015.590338] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925933, 'name': Rename_Task, 'duration_secs': 0.143692} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.590629] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1015.590868] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43285b76-6788-42b5-931f-7c2a78229861 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.596804] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1015.596804] env[69994]: value = "task-2925934" [ 1015.596804] env[69994]: _type = "Task" [ 1015.596804] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.600663] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1015.600852] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.029s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.601352] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.521s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.603203] env[69994]: INFO nova.compute.claims [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1015.612737] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925934, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.748538] env[69994]: DEBUG nova.network.neutron [req-fdf812a8-8c62-43b1-90af-9b6df42bda61 req-c25000c9-a9c4-480e-b4c7-caff7ec4de71 service nova] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updated VIF entry in instance network info cache for port ad28c14f-638f-4073-b494-cb6a2a579dab. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1015.748703] env[69994]: DEBUG nova.network.neutron [req-fdf812a8-8c62-43b1-90af-9b6df42bda61 req-c25000c9-a9c4-480e-b4c7-caff7ec4de71 service nova] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating instance_info_cache with network_info: [{"id": "ad28c14f-638f-4073-b494-cb6a2a579dab", "address": "fa:16:3e:60:ef:9a", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad28c14f-63", "ovs_interfaceid": "ad28c14f-638f-4073-b494-cb6a2a579dab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.803612] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52031c82-960a-3922-3dac-db0ebf489ad1, 'name': SearchDatastore_Task, 'duration_secs': 0.010235} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.804236] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.804507] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1015.804794] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.804996] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.805219] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1015.805528] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3873a9c5-ca9d-4f10-8167-eabc09d7b6ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.814928] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1015.815143] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1015.815896] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ba45968-0239-40c1-b6e9-2a732bc09b8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.826444] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1015.826444] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dfdaba-a4c5-ba55-5231-43ae6754b42e" [ 1015.826444] env[69994]: _type = "Task" [ 1015.826444] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.835222] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dfdaba-a4c5-ba55-5231-43ae6754b42e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.846034] env[69994]: INFO nova.compute.manager [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] instance snapshotting [ 1015.852170] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6042db8b-fdc2-4bf4-a7dd-9c29f9a379aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.873024] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d32a5c2-4bee-4386-8d6d-08794dbe9752 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.895536] env[69994]: DEBUG nova.compute.manager [req-2794eb88-a831-4dd5-8847-baf10e8613f0 req-f7fab121-7e33-4afa-995e-ec1815e764da service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Received event network-changed-0c8c3a9b-a328-44f8-81e2-5a480901ac9f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1015.896185] env[69994]: DEBUG nova.compute.manager [req-2794eb88-a831-4dd5-8847-baf10e8613f0 req-f7fab121-7e33-4afa-995e-ec1815e764da service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Refreshing instance network info cache due to event network-changed-0c8c3a9b-a328-44f8-81e2-5a480901ac9f. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1015.896185] env[69994]: DEBUG oslo_concurrency.lockutils [req-2794eb88-a831-4dd5-8847-baf10e8613f0 req-f7fab121-7e33-4afa-995e-ec1815e764da service nova] Acquiring lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.896185] env[69994]: DEBUG oslo_concurrency.lockutils [req-2794eb88-a831-4dd5-8847-baf10e8613f0 req-f7fab121-7e33-4afa-995e-ec1815e764da service nova] Acquired lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.896502] env[69994]: DEBUG nova.network.neutron [req-2794eb88-a831-4dd5-8847-baf10e8613f0 req-f7fab121-7e33-4afa-995e-ec1815e764da service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Refreshing network info cache for port 0c8c3a9b-a328-44f8-81e2-5a480901ac9f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1015.966006] env[69994]: DEBUG oslo_concurrency.lockutils [None req-912b92df-6c97-4591-b8a0-c88333ca271d tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.154s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.109464] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925934, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.252937] env[69994]: DEBUG oslo_concurrency.lockutils [req-fdf812a8-8c62-43b1-90af-9b6df42bda61 req-c25000c9-a9c4-480e-b4c7-caff7ec4de71 service nova] Releasing lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.253232] env[69994]: DEBUG nova.compute.manager [req-fdf812a8-8c62-43b1-90af-9b6df42bda61 req-c25000c9-a9c4-480e-b4c7-caff7ec4de71 service nova] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Received event network-vif-deleted-b6f3b951-fb8f-4467-98e6-ec4cdf4169cd {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1016.337314] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dfdaba-a4c5-ba55-5231-43ae6754b42e, 'name': SearchDatastore_Task, 'duration_secs': 0.014701} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.338213] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4985de2d-3245-48e5-8ec0-40e3420a1f6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.343155] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1016.343155] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b69933-4614-bf3d-41d5-cc4e89cc83b5" [ 1016.343155] env[69994]: _type = "Task" [ 1016.343155] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.350355] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b69933-4614-bf3d-41d5-cc4e89cc83b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.384598] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1016.384598] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ed89ef89-bc7a-4809-a992-051c1ca53554 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.391291] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 1016.391291] env[69994]: value = "task-2925935" [ 1016.391291] env[69994]: _type = "Task" [ 1016.391291] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.403236] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925935, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.608035] env[69994]: DEBUG oslo_vmware.api [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925934, 'name': PowerOnVM_Task, 'duration_secs': 0.642407} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.610419] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1016.610634] env[69994]: INFO nova.compute.manager [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Took 6.37 seconds to spawn the instance on the hypervisor. [ 1016.610889] env[69994]: DEBUG nova.compute.manager [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1016.614373] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bada09b-66ca-413c-b375-d04c4b5f5c41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.649758] env[69994]: DEBUG nova.network.neutron [req-2794eb88-a831-4dd5-8847-baf10e8613f0 req-f7fab121-7e33-4afa-995e-ec1815e764da service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Updated VIF entry in instance network info cache for port 0c8c3a9b-a328-44f8-81e2-5a480901ac9f. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1016.650262] env[69994]: DEBUG nova.network.neutron [req-2794eb88-a831-4dd5-8847-baf10e8613f0 req-f7fab121-7e33-4afa-995e-ec1815e764da service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Updating instance_info_cache with network_info: [{"id": "0c8c3a9b-a328-44f8-81e2-5a480901ac9f", "address": "fa:16:3e:93:51:57", "network": {"id": "75f691f8-2853-4a39-bfdb-081341871a53", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1828741811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e63c47302d14d849b239a91580a25ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c8c3a9b-a3", "ovs_interfaceid": "0c8c3a9b-a328-44f8-81e2-5a480901ac9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.857435] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 
tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b69933-4614-bf3d-41d5-cc4e89cc83b5, 'name': SearchDatastore_Task, 'duration_secs': 0.042815} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.858618] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.858881] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 5b9648a7-f26f-4151-be5c-59991035a529/5b9648a7-f26f-4151-be5c-59991035a529.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1016.859692] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d8e276-66c2-4379-94ca-8e652883df73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.862222] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-371d14bd-5af9-4a9e-b96c-7c4851c6d790 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.869630] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc23d7d2-f89e-4273-8cc7-bccf7e2f6d0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.873111] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1016.873111] env[69994]: value = "task-2925936" [ 1016.873111] env[69994]: _type = "Task" [ 1016.873111] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.906150] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48c00d2-e805-4236-9299-11aa254144aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.911635] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925936, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.916634] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925935, 'name': CreateSnapshot_Task, 'duration_secs': 0.467795} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.918632] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1016.919414] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1e90a3-6a53-423f-9eb6-14f13c1d6450 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.923060] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6d1c88-2cd1-4c47-9655-4491c2e3b5c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.945823] env[69994]: DEBUG nova.compute.provider_tree [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1017.134913] env[69994]: INFO nova.compute.manager [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Took 20.75 seconds to build instance. [ 1017.152868] env[69994]: DEBUG oslo_concurrency.lockutils [req-2794eb88-a831-4dd5-8847-baf10e8613f0 req-f7fab121-7e33-4afa-995e-ec1815e764da service nova] Releasing lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.383459] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925936, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47221} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.383742] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 5b9648a7-f26f-4151-be5c-59991035a529/5b9648a7-f26f-4151-be5c-59991035a529.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1017.384026] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1017.384259] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a5ccbd0-a573-4743-a13f-d64f35d509a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.390389] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1017.390389] env[69994]: value = "task-2925938" [ 1017.390389] env[69994]: _type = "Task" [ 1017.390389] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.399231] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925938, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.456113] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1017.457973] env[69994]: DEBUG nova.scheduler.client.report [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1017.461052] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-52b762f0-3a20-4506-bc75-5bbd94069717 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.469765] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 1017.469765] env[69994]: value = "task-2925939" [ 1017.469765] env[69994]: _type = "Task" [ 1017.469765] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.480893] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925939, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.637839] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12b1138e-ae2b-494f-b3d0-f255d2d1a383 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Lock "1ff25686-e13e-4003-909b-18bf919aa20c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.271s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.902814] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925938, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059051} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.903105] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1017.903902] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64219ec4-1d31-4c3f-988f-b4dc33445eba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.926733] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 5b9648a7-f26f-4151-be5c-59991035a529/5b9648a7-f26f-4151-be5c-59991035a529.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1017.927122] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d60f0cba-df5a-4e53-8479-a945a089d6de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.949840] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1017.949840] env[69994]: value = "task-2925940" [ 1017.949840] env[69994]: _type = "Task" [ 1017.949840] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.960870] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925940, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.965767] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.966302] env[69994]: DEBUG nova.compute.manager [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1017.969259] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.321s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.969259] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.972377] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.072s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.972716] env[69994]: INFO nova.compute.claims [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1017.986648] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925939, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.008964] env[69994]: INFO nova.scheduler.client.report [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted allocations for instance d1875a97-9eba-47be-a76d-6088cb13412b [ 1018.461411] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.477664] env[69994]: DEBUG nova.compute.utils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1018.481939] env[69994]: DEBUG nova.compute.manager [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Not allocating networking since 'none' was specified. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1018.492282] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925939, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.517091] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60a2cadc-63e3-4454-9275-21009937d911 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "d1875a97-9eba-47be-a76d-6088cb13412b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.552s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.688471] env[69994]: INFO nova.compute.manager [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Rebuilding instance [ 1018.734193] env[69994]: DEBUG nova.compute.manager [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1018.735130] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2cbeaf-f5d0-4e38-a7e7-cff2ba68f87c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.962222] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925940, 'name': ReconfigVM_Task, 'duration_secs': 0.641738} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.962507] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 5b9648a7-f26f-4151-be5c-59991035a529/5b9648a7-f26f-4151-be5c-59991035a529.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.963112] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43851124-9be8-40cb-8da7-eea9791819d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.969191] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1018.969191] env[69994]: value = "task-2925941" [ 1018.969191] env[69994]: _type = "Task" [ 1018.969191] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.977720] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925941, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.981228] env[69994]: DEBUG nova.compute.manager [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1018.992516] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925939, 'name': CloneVM_Task, 'duration_secs': 1.361049} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.992806] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Created linked-clone VM from snapshot [ 1018.993660] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4543fa8-40ac-4b34-9389-879685b2d25b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.001233] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Uploading image f3f8b9e0-bc3e-4bd4-800b-f48f29e25b1e {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1019.028484] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1019.028484] env[69994]: value = "vm-587576" [ 1019.028484] env[69994]: _type = "VirtualMachine" [ 1019.028484] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1019.028759] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bc58281e-cb2e-43fb-a3b8-ddfc42900dab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.035122] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lease: (returnval){ [ 1019.035122] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226e0c0-58e5-0914-f3a4-91daacee6546" [ 1019.035122] env[69994]: _type = "HttpNfcLease" [ 1019.035122] env[69994]: } obtained for exporting VM: (result){ [ 1019.035122] env[69994]: value = "vm-587576" [ 1019.035122] env[69994]: _type = "VirtualMachine" [ 1019.035122] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1019.035401] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the lease: (returnval){ [ 1019.035401] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226e0c0-58e5-0914-f3a4-91daacee6546" [ 1019.035401] env[69994]: _type = "HttpNfcLease" [ 1019.035401] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1019.044718] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1019.044718] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226e0c0-58e5-0914-f3a4-91daacee6546" [ 1019.044718] env[69994]: _type = "HttpNfcLease" [ 1019.044718] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1019.178816] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1019.178816] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587574', 'volume_id': 'dcf0e539-f80c-4035-8888-0a3f0ceb66f0', 'name': 'volume-dcf0e539-f80c-4035-8888-0a3f0ceb66f0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f946992b-faf2-4580-adcd-806d3b8fd104', 'attached_at': '', 'detached_at': '', 'volume_id': 'dcf0e539-f80c-4035-8888-0a3f0ceb66f0', 'serial': 'dcf0e539-f80c-4035-8888-0a3f0ceb66f0'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1019.179700] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bade46f-178b-4da2-8e09-a4856d2bb2e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.201122] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a88db2-4795-4756-9c7d-02211f15eb3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.228826] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] volume-dcf0e539-f80c-4035-8888-0a3f0ceb66f0/volume-dcf0e539-f80c-4035-8888-0a3f0ceb66f0.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.231953] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a9b4407-47f5-4cd8-93e9-81352f29c463 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.252147] env[69994]: DEBUG oslo_vmware.api [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1019.252147] env[69994]: value = "task-2925943" [ 1019.252147] env[69994]: _type = "Task" [ 1019.252147] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.263221] env[69994]: DEBUG oslo_vmware.api [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925943, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.303260] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29cd9bd-660f-4328-a482-5924253875e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.311947] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5074fa5-b0fa-4cd0-8e54-cbcc9dcef0f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.348456] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a8aa60-5271-40ec-865e-f02902da8d29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.357438] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b44a71a-d30b-4a45-a7f9-bd04fc9fd96d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.372969] env[69994]: DEBUG nova.compute.provider_tree [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1019.479437] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925941, 'name': Rename_Task, 'duration_secs': 0.258464} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.479718] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1019.479966] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e31d511-6f5a-4424-8a28-24ef327fa5a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.485524] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1019.485524] env[69994]: value = "task-2925944" [ 1019.485524] env[69994]: _type = "Task" [ 1019.485524] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.496390] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925944, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.544521] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1019.544521] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226e0c0-58e5-0914-f3a4-91daacee6546" [ 1019.544521] env[69994]: _type = "HttpNfcLease" [ 1019.544521] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1019.544855] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1019.544855] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226e0c0-58e5-0914-f3a4-91daacee6546" [ 1019.544855] env[69994]: _type = "HttpNfcLease" [ 1019.544855] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1019.545835] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e229f0-cef0-43d6-933f-edf5d12159a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.553316] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5212fbcc-1783-cb51-08f5-a61f3d028b9f/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1019.553506] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5212fbcc-1783-cb51-08f5-a61f3d028b9f/disk-0.vmdk for reading. 
{{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1019.650693] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8b2c995a-dcd8-4ca8-942a-9517e073ebb5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.748740] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1019.749095] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6004f02-8689-4bb7-a25e-beb28e20e8f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.757227] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1019.757227] env[69994]: value = "task-2925945" [ 1019.757227] env[69994]: _type = "Task" [ 1019.757227] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.763724] env[69994]: DEBUG oslo_vmware.api [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925943, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.768188] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925945, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.912378] env[69994]: DEBUG nova.scheduler.client.report [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 119 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1019.912865] env[69994]: DEBUG nova.compute.provider_tree [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 119 to 120 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1019.912970] env[69994]: DEBUG nova.compute.provider_tree [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1019.993770] env[69994]: DEBUG nova.compute.manager [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1019.999950] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925944, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.017113] env[69994]: DEBUG nova.virt.hardware [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1020.017786] env[69994]: DEBUG nova.virt.hardware [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1020.017930] env[69994]: DEBUG nova.virt.hardware [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1020.018262] env[69994]: DEBUG nova.virt.hardware [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1020.018437] env[69994]: DEBUG nova.virt.hardware [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1020.018649] env[69994]: DEBUG nova.virt.hardware [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1020.018907] env[69994]: DEBUG nova.virt.hardware [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1020.019195] env[69994]: DEBUG nova.virt.hardware [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1020.019493] env[69994]: DEBUG 
nova.virt.hardware [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1020.019684] env[69994]: DEBUG nova.virt.hardware [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1020.019926] env[69994]: DEBUG nova.virt.hardware [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1020.020962] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be9e67e-7b9f-413e-bb8b-426e3b9d7651 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.030192] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fee9709-5a2a-4c0e-9208-ba190caebb0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.045928] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1020.052521] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Creating folder: Project (764748819c2c4c1788cb5894564a462a). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1020.053021] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8c117f9-a004-4c7d-a687-7549a203be8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.063492] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Created folder: Project (764748819c2c4c1788cb5894564a462a) in parent group-v587342. [ 1020.063723] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Creating folder: Instances. Parent ref: group-v587577. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1020.064459] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c29102ad-e22a-4891-a265-caa7789c2d55 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.072991] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Created folder: Instances in parent group-v587577. [ 1020.073740] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1020.073839] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1020.074013] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6afa7b34-38ef-457f-8586-90a3f2deb1d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.092697] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1020.092697] env[69994]: value = "task-2925948" [ 1020.092697] env[69994]: _type = "Task" [ 1020.092697] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.100804] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925948, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.265966] env[69994]: DEBUG oslo_vmware.api [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925943, 'name': ReconfigVM_Task, 'duration_secs': 0.728429} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.267227] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Reconfigured VM instance instance-00000051 to attach disk [datastore1] volume-dcf0e539-f80c-4035-8888-0a3f0ceb66f0/volume-dcf0e539-f80c-4035-8888-0a3f0ceb66f0.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.275865] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90b59264-ab2b-4d07-8209-f5f1820dfbd7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.286611] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925945, 'name': PowerOffVM_Task, 'duration_secs': 0.139137} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.287035] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1020.287847] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.289295] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b610500-f94f-42d3-8703-b35072fa21b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.293759] env[69994]: DEBUG oslo_vmware.api [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1020.293759] env[69994]: value = "task-2925949" [ 1020.293759] env[69994]: _type = "Task" [ 1020.293759] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.300047] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.300675] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7bc7b7d-7009-4226-878b-11d26a1be6b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.306659] env[69994]: DEBUG oslo_vmware.api [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925949, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.336037] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1020.336489] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1020.336761] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Deleting the datastore file [datastore1] 1ff25686-e13e-4003-909b-18bf919aa20c {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.337255] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-969d8336-a541-4865-ba90-09b4c3fa9da4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.344890] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1020.344890] env[69994]: value = "task-2925951" [ 1020.344890] env[69994]: _type = "Task" [ 1020.344890] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.355897] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925951, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.419859] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.420701] env[69994]: DEBUG nova.compute.manager [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Start building networks asynchronously for instance. 
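The "compute_resources" lock lines above (one request holding it 2.448s, another waiting 13.689s for it) come from oslo.concurrency's lockutils, which Nova uses to serialize resource claims per host. A minimal illustrative sketch of that locking pattern, with a toy claim body standing in for the real ResourceTracker internals (not Nova's actual code):

from oslo_concurrency import lockutils

class ToyResourceTracker(object):
    """Illustrative only; the real tracker lives in nova.compute.resource_tracker."""

    def __init__(self, free_vcpus):
        self.free_vcpus = free_vcpus

    @lockutils.synchronized('compute_resources')
    def instance_claim(self, vcpus):
        # Every claim and resource update on the host serializes on this named
        # lock, which is why the log records wait/hold times for each caller.
        if vcpus > self.free_vcpus:
            raise RuntimeError('insufficient vCPUs')
        self.free_vcpus -= vcpus
        return self.free_vcpus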
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1020.424453] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.689s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.426213] env[69994]: INFO nova.compute.claims [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1020.498688] env[69994]: DEBUG oslo_vmware.api [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925944, 'name': PowerOnVM_Task, 'duration_secs': 0.658261} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.499599] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1020.499599] env[69994]: INFO nova.compute.manager [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Took 7.90 seconds to spawn the instance on the hypervisor. [ 1020.499801] env[69994]: DEBUG nova.compute.manager [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1020.500791] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5859bc5-6080-4375-9987-fac4182cc960 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.603792] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925948, 'name': CreateVM_Task, 'duration_secs': 0.401005} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.604184] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1020.604817] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.605183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.605648] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1020.606122] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd824ad9-48cb-43f4-9d4e-95b1613f9806 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.613671] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1020.613671] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529f8dd1-309d-8c05-b19a-917072baf97e" [ 1020.613671] env[69994]: _type = "Task" [ 1020.613671] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.622402] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529f8dd1-309d-8c05-b19a-917072baf97e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.804664] env[69994]: DEBUG oslo_vmware.api [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925949, 'name': ReconfigVM_Task, 'duration_secs': 0.183982} completed successfully. 
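The CreateVM_Task sequence above ("Invoking Folder.CreateVM_Task", then repeated "progress is N%" polls until "completed successfully") is the standard invoke-then-poll pattern of oslo.vmware. A hedged sketch of that pattern, assuming the folder and resource-pool refs and the config spec are already built; host and credentials below are placeholders, not the values from this deployment:

from oslo_vmware import api

session = api.VMwareAPISession(
    host='vcenter.example.test',      # placeholder
    server_username='user',           # placeholder
    server_password='secret',         # placeholder
    api_retry_count=10,
    task_poll_interval=0.5)

def create_vm(folder_ref, config_spec, res_pool_ref):
    # Issues Folder.CreateVM_Task against vCenter ...
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=res_pool_ref)
    # ... then polls it, which is what produces the periodic "progress is N%"
    # DEBUG lines until the task completes (or raises on error).
    task_info = session.wait_for_task(task_ref)
    return task_info.result   # moref of the newly created VirtualMachine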
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.805158] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587574', 'volume_id': 'dcf0e539-f80c-4035-8888-0a3f0ceb66f0', 'name': 'volume-dcf0e539-f80c-4035-8888-0a3f0ceb66f0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f946992b-faf2-4580-adcd-806d3b8fd104', 'attached_at': '', 'detached_at': '', 'volume_id': 'dcf0e539-f80c-4035-8888-0a3f0ceb66f0', 'serial': 'dcf0e539-f80c-4035-8888-0a3f0ceb66f0'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1020.838210] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Acquiring lock "660277f8-a7ff-43a9-8068-15e3db5a1069" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.838497] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Lock "660277f8-a7ff-43a9-8068-15e3db5a1069" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.838720] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Acquiring lock "660277f8-a7ff-43a9-8068-15e3db5a1069-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.838957] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Lock "660277f8-a7ff-43a9-8068-15e3db5a1069-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.839164] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Lock "660277f8-a7ff-43a9-8068-15e3db5a1069-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.841273] env[69994]: INFO nova.compute.manager [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 
tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Terminating instance [ 1020.855569] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925951, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150662} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.856119] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1020.856326] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1020.856507] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1020.930580] env[69994]: DEBUG nova.compute.utils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1020.934189] env[69994]: DEBUG nova.compute.manager [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1020.935741] env[69994]: DEBUG nova.network.neutron [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1020.982914] env[69994]: DEBUG nova.policy [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5244db8b475345e2aad86c5eaa32a261', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23d78573882440eca91629924f6aa7c6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1021.020311] env[69994]: INFO nova.compute.manager [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Took 24.29 seconds to build instance. [ 1021.124877] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529f8dd1-309d-8c05-b19a-917072baf97e, 'name': SearchDatastore_Task, 'duration_secs': 0.019914} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.125370] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.125688] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1021.125965] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.126132] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.127602] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1021.127602] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe6e35d4-f084-4b77-917e-9065e074afc8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.136231] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1021.137051] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1021.137423] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f3c3299-97d2-46e4-8d57-d8782e0ba79b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.143163] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1021.143163] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521ebcd1-1c27-0a1a-5a7a-fb239f16cf81" [ 1021.143163] env[69994]: _type = "Task" [ 1021.143163] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.151188] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521ebcd1-1c27-0a1a-5a7a-fb239f16cf81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.301850] env[69994]: DEBUG nova.network.neutron [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Successfully created port: a44e6223-be47-4cd2-87c7-44a1fb78bc1c {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1021.346121] env[69994]: DEBUG nova.compute.manager [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Start destroying the instance on the hypervisor. 
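The per-image locks above (Acquiring/Releasing lock "[datastore2] devstack-image-cache_base/<image-id>...") guard the datastore image cache so that only one request at a time checks for and populates a cached VMDK. A rough sketch of that guard using oslo.concurrency's lock context manager; the existence-check and fetch helpers here are hypothetical stand-ins, only the locking pattern mirrors the log:

from oslo_concurrency import lockutils

def ensure_cached_image(session, image_id, cache_vmdk_path, fetch_image):
    lock_name = '[datastore2] devstack-image-cache_base/%s' % image_id
    with lockutils.lock(lock_name):
        # Only the lock holder may populate this cache entry; concurrent
        # requests for the same image wait here instead of downloading twice.
        if not vmdk_exists(session, cache_vmdk_path):   # hypothetical helper
            fetch_image(session, image_id, cache_vmdk_path)
    return cache_vmdk_path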
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1021.347602] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1021.348208] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb6d0d4-1957-40c7-b62e-bb140aa80749 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.357422] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1021.357808] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0abb616-148a-4387-9c7a-29a10dc702f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.370638] env[69994]: DEBUG oslo_vmware.api [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Waiting for the task: (returnval){ [ 1021.370638] env[69994]: value = "task-2925952" [ 1021.370638] env[69994]: _type = "Task" [ 1021.370638] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.381428] env[69994]: DEBUG oslo_vmware.api [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925952, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.435517] env[69994]: DEBUG nova.compute.manager [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1021.522505] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3726d75-ff07-40b9-88dd-172b8403d786 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "5b9648a7-f26f-4151-be5c-59991035a529" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.796s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.664289] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521ebcd1-1c27-0a1a-5a7a-fb239f16cf81, 'name': SearchDatastore_Task, 'duration_secs': 0.016551} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.668117] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9e362e3-c317-46f5-a5d6-44e1b4325152 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.673676] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1021.673676] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527e1a68-54cb-69ba-57c9-2c9e7f6cfe53" [ 1021.673676] env[69994]: _type = "Task" [ 1021.673676] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.683158] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527e1a68-54cb-69ba-57c9-2c9e7f6cfe53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.758628] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09200fbf-f4c9-4979-9fa9-07ee621438ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.767031] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9389a8-96a0-4d61-9690-862f21e15bfe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.797850] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76e0eb4-648e-4644-b16b-278626a04cdd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.805899] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316a0b40-aa1e-472a-bea6-327c631de63d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.820016] env[69994]: DEBUG nova.compute.provider_tree [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.866590] env[69994]: DEBUG nova.objects.instance [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lazy-loading 'flavor' on Instance uuid f946992b-faf2-4580-adcd-806d3b8fd104 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.882352] env[69994]: DEBUG oslo_vmware.api [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925952, 'name': PowerOffVM_Task, 'duration_secs': 0.212452} completed successfully. 
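Both teardowns in this window (instances 1ff25686-... and 660277f8-...) follow the same order: power off the VM, unregister it, then delete its datastore directory. A condensed sketch of that sequence using the same invoke-and-wait session pattern; vm_ref, dc_ref and ds_path are assumed inputs:

def destroy_vm(session, vm_ref, dc_ref, ds_path):
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
    # UnregisterVM is a plain call rather than a task, which is why the log
    # shows only the invocation and an "Unregistered the VM" line, no task id.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    file_mgr = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_mgr,
                              name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)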
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.882745] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1021.882912] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1021.883377] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f11871ce-5039-46e1-98a0-01dc7b6eaed9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.896615] env[69994]: DEBUG nova.virt.hardware [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1021.896869] env[69994]: DEBUG nova.virt.hardware [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1021.897381] env[69994]: DEBUG nova.virt.hardware [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1021.897658] env[69994]: DEBUG nova.virt.hardware [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1021.897819] env[69994]: DEBUG nova.virt.hardware [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1021.898306] env[69994]: DEBUG nova.virt.hardware [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 
tempest-ServersListShow296Test-331668703-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1021.898762] env[69994]: DEBUG nova.virt.hardware [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1021.898946] env[69994]: DEBUG nova.virt.hardware [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1021.899145] env[69994]: DEBUG nova.virt.hardware [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1021.899310] env[69994]: DEBUG nova.virt.hardware [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1021.899473] env[69994]: DEBUG nova.virt.hardware [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1021.900954] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b92be9b-1dd7-4c9c-a14e-909d69397683 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.909598] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710c52a0-d490-46aa-b6f6-1e193ef96d67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.925661] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1021.932257] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
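The repeated topology blocks (flavor/image limits 0:0:0, "Got 1 possible topologies", sorted result VirtCPUTopology(cores=1,sockets=1,threads=1)) reduce to a simple enumeration for a 1-vCPU m1.nano flavor with no topology hints. A simplified illustration of why only one candidate exists; this is not Nova's actual implementation:

def possible_topologies(vcpus):
    # Enumerate (sockets, cores, threads) triples whose product equals the
    # vCPU count; for vcpus=1 the only solution is (1, 1, 1).
    topos = []
    for sockets in range(1, vcpus + 1):
        for cores in range(1, vcpus + 1):
            for threads in range(1, vcpus + 1):
                if sockets * cores * threads == vcpus:
                    topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"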
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1021.932801] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1021.933044] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce7c205f-6c82-4dc3-81b3-c299fb253c25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.953717] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1021.954118] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1021.954373] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Deleting the datastore file [datastore2] 660277f8-a7ff-43a9-8068-15e3db5a1069 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1021.954646] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8eaa0019-7f09-46d0-b3f1-1713bfc3868f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.958733] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1021.958733] env[69994]: value = "task-2925954" [ 1021.958733] env[69994]: _type = "Task" [ 1021.958733] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.963440] env[69994]: DEBUG oslo_vmware.api [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Waiting for the task: (returnval){ [ 1021.963440] env[69994]: value = "task-2925955" [ 1021.963440] env[69994]: _type = "Task" [ 1021.963440] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.969942] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925954, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.975383] env[69994]: DEBUG oslo_vmware.api [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925955, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.030136] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "f946992b-faf2-4580-adcd-806d3b8fd104" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.186291] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527e1a68-54cb-69ba-57c9-2c9e7f6cfe53, 'name': SearchDatastore_Task, 'duration_secs': 0.01721} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.186291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.186291] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 686feb53-00e2-43d9-b316-09c089df0891/686feb53-00e2-43d9-b316-09c089df0891.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1022.186291] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b829d561-306f-444a-964e-84f4b390b8b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.194255] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1022.194255] env[69994]: value = "task-2925956" [ 1022.194255] env[69994]: _type = "Task" [ 1022.194255] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.201552] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925956, 'name': CopyVirtualDisk_Task} progress is 0%. 
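The CopyVirtualDisk_Task above clones the cached image VMDK into the new instance's directory on the same datastore. Using the same invoke-and-wait pattern sketched earlier, that call would look roughly like the following; parameter names follow the vSphere VirtualDiskManager API and the refs are assumed inputs:

def copy_cached_image(session, dc_ref, source_vmdk, dest_vmdk):
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=source_vmdk, sourceDatacenter=dc_ref,
                              destName=dest_vmdk, destDatacenter=dc_ref)
    # The log shows this task going 0% -> 51% -> completed while being polled.
    session.wait_for_task(task)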
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.323968] env[69994]: DEBUG nova.scheduler.client.report [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1022.373921] env[69994]: DEBUG oslo_concurrency.lockutils [None req-85f1c679-48e8-4f50-b134-7be9921a11d2 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "f946992b-faf2-4580-adcd-806d3b8fd104" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.804s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.374934] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "f946992b-faf2-4580-adcd-806d3b8fd104" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.345s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.375195] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "f946992b-faf2-4580-adcd-806d3b8fd104-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.375434] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "f946992b-faf2-4580-adcd-806d3b8fd104-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.375650] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "f946992b-faf2-4580-adcd-806d3b8fd104-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.377952] env[69994]: INFO nova.compute.manager [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Terminating instance [ 1022.453800] env[69994]: DEBUG nova.compute.manager [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 
tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1022.471986] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925954, 'name': CreateVM_Task, 'duration_secs': 0.399607} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.475438] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1022.477015] env[69994]: DEBUG oslo_concurrency.lockutils [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.477015] env[69994]: DEBUG oslo_concurrency.lockutils [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.477015] env[69994]: DEBUG oslo_concurrency.lockutils [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1022.481104] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61fa366b-c7fd-4014-953c-2b144ba60586 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.485849] env[69994]: DEBUG oslo_vmware.api [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Task: {'id': task-2925955, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.244949} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.487470] env[69994]: DEBUG nova.virt.hardware [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1022.488107] env[69994]: DEBUG nova.virt.hardware [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1022.488107] env[69994]: DEBUG nova.virt.hardware [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1022.488219] env[69994]: DEBUG nova.virt.hardware [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1022.488399] env[69994]: DEBUG nova.virt.hardware [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1022.488624] env[69994]: DEBUG nova.virt.hardware [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1022.488936] env[69994]: DEBUG nova.virt.hardware [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1022.489571] env[69994]: DEBUG nova.virt.hardware [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1022.489842] env[69994]: DEBUG 
nova.virt.hardware [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1022.490085] env[69994]: DEBUG nova.virt.hardware [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1022.490307] env[69994]: DEBUG nova.virt.hardware [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1022.491101] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.491374] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1022.492034] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1022.492319] env[69994]: INFO nova.compute.manager [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1022.492650] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1022.494025] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6653cadc-a6c6-4e85-b73d-0dbf36d838bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.498771] env[69994]: DEBUG nova.compute.manager [-] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1022.498902] env[69994]: DEBUG nova.network.neutron [-] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1022.504230] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1022.504230] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d8d7d6-cd9b-c480-0d48-d6d8230b2cf0" [ 1022.504230] env[69994]: _type = "Task" [ 1022.504230] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.512625] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da98d715-4200-4727-b709-b5e42e4676db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.526576] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d8d7d6-cd9b-c480-0d48-d6d8230b2cf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.706112] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925956, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.829486] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.830135] env[69994]: DEBUG nova.compute.manager [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1022.833568] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.601s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.833815] env[69994]: DEBUG nova.objects.instance [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Lazy-loading 'resources' on Instance uuid 29326ab7-2b4b-42af-a90c-e86510bcd443 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.881924] env[69994]: DEBUG nova.compute.manager [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1022.882170] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.882474] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63795ab8-0cfd-4a6f-b40c-6240b1000326 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.893996] env[69994]: DEBUG oslo_vmware.api [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1022.893996] env[69994]: value = "task-2925957" [ 1022.893996] env[69994]: _type = "Task" [ 1022.893996] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.910949] env[69994]: DEBUG oslo_vmware.api [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925957, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.020207] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d8d7d6-cd9b-c480-0d48-d6d8230b2cf0, 'name': SearchDatastore_Task, 'duration_secs': 0.082604} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.020207] env[69994]: DEBUG oslo_concurrency.lockutils [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.020207] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1023.020207] env[69994]: DEBUG oslo_concurrency.lockutils [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.020207] env[69994]: DEBUG oslo_concurrency.lockutils [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.020207] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1023.020207] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28bab2af-823d-44d3-8a28-47278ad4f7b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.028324] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1023.028536] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1023.029326] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01a55049-114d-46e0-bc6b-b39b47931e8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.035426] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1023.035426] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520c3b9f-0acd-e399-b0be-4b9c34c6edf1" [ 1023.035426] env[69994]: _type = "Task" [ 1023.035426] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.047619] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520c3b9f-0acd-e399-b0be-4b9c34c6edf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.202207] env[69994]: DEBUG nova.compute.manager [req-a2f384fd-a369-400a-be7d-c30a8ffcd3d1 req-237e2a53-e926-4bfd-be61-414db23b9955 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Received event network-vif-deleted-b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1023.202318] env[69994]: INFO nova.compute.manager [req-a2f384fd-a369-400a-be7d-c30a8ffcd3d1 req-237e2a53-e926-4bfd-be61-414db23b9955 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Neutron deleted interface b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b; detaching it from the instance and deleting it from the info cache [ 1023.202545] env[69994]: DEBUG nova.network.neutron [req-a2f384fd-a369-400a-be7d-c30a8ffcd3d1 req-237e2a53-e926-4bfd-be61-414db23b9955 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.208616] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925956, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.660716} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.209257] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 686feb53-00e2-43d9-b316-09c089df0891/686feb53-00e2-43d9-b316-09c089df0891.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1023.209595] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1023.210290] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1ca246b-aae3-470f-8198-ca7562a386de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.218241] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1023.218241] env[69994]: value = "task-2925958" [ 1023.218241] env[69994]: _type = "Task" [ 1023.218241] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.227148] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925958, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.337274] env[69994]: DEBUG nova.compute.utils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1023.343097] env[69994]: DEBUG nova.compute.manager [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1023.343097] env[69994]: DEBUG nova.network.neutron [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1023.398791] env[69994]: DEBUG nova.policy [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4543702642614e079383389379629d8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0bbe936f4d284e73999846251269fefd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1023.408279] env[69994]: DEBUG oslo_vmware.api [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925957, 'name': PowerOffVM_Task, 'duration_secs': 0.237925} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.411301] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1023.411814] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1023.412162] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587574', 'volume_id': 'dcf0e539-f80c-4035-8888-0a3f0ceb66f0', 'name': 'volume-dcf0e539-f80c-4035-8888-0a3f0ceb66f0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f946992b-faf2-4580-adcd-806d3b8fd104', 'attached_at': '', 'detached_at': '', 'volume_id': 'dcf0e539-f80c-4035-8888-0a3f0ceb66f0', 'serial': 'dcf0e539-f80c-4035-8888-0a3f0ceb66f0'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1023.418217] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319279a0-85c1-459b-91ad-e98d56136a49 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.424477] env[69994]: DEBUG nova.compute.manager [req-4b66a20e-f985-4e5e-b5ea-9a8c54e928fb req-d901aab8-18cc-4c3e-894f-222eea7b44c9 service nova] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Received event network-vif-plugged-a44e6223-be47-4cd2-87c7-44a1fb78bc1c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1023.424686] env[69994]: DEBUG oslo_concurrency.lockutils [req-4b66a20e-f985-4e5e-b5ea-9a8c54e928fb req-d901aab8-18cc-4c3e-894f-222eea7b44c9 service nova] Acquiring lock "93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.424896] env[69994]: DEBUG oslo_concurrency.lockutils [req-4b66a20e-f985-4e5e-b5ea-9a8c54e928fb req-d901aab8-18cc-4c3e-894f-222eea7b44c9 service nova] Lock "93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.425075] env[69994]: DEBUG oslo_concurrency.lockutils [req-4b66a20e-f985-4e5e-b5ea-9a8c54e928fb req-d901aab8-18cc-4c3e-894f-222eea7b44c9 service nova] Lock "93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.425245] env[69994]: DEBUG nova.compute.manager [req-4b66a20e-f985-4e5e-b5ea-9a8c54e928fb req-d901aab8-18cc-4c3e-894f-222eea7b44c9 service nova] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] No waiting events found dispatching network-vif-plugged-a44e6223-be47-4cd2-87c7-44a1fb78bc1c {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1023.425403] env[69994]: WARNING nova.compute.manager [req-4b66a20e-f985-4e5e-b5ea-9a8c54e928fb req-d901aab8-18cc-4c3e-894f-222eea7b44c9 service nova] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Received unexpected event network-vif-plugged-a44e6223-be47-4cd2-87c7-44a1fb78bc1c for instance with vm_state building and task_state spawning. 
[ 1023.458175] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2f4e85-b585-4cfd-ae5d-bdc5801421d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.468644] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acbc7960-a43f-4b75-9613-a43e23b29222 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.500744] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86cb8341-2f24-4133-bb75-64c1d6ac03ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.519265] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] The volume has not been displaced from its original location: [datastore1] volume-dcf0e539-f80c-4035-8888-0a3f0ceb66f0/volume-dcf0e539-f80c-4035-8888-0a3f0ceb66f0.vmdk. No consolidation needed. {{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1023.524966] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Reconfiguring VM instance instance-00000051 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1023.528121] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abddeb24-67b8-4b2f-a118-5c48c7e59e1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.553772] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520c3b9f-0acd-e399-b0be-4b9c34c6edf1, 'name': SearchDatastore_Task, 'duration_secs': 0.017615} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.558057] env[69994]: DEBUG oslo_vmware.api [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1023.558057] env[69994]: value = "task-2925959" [ 1023.558057] env[69994]: _type = "Task" [ 1023.558057] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.559418] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ce0e162-4462-4cec-a2b8-01ca49001da9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.567823] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1023.567823] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b328c6-2769-7076-c024-2a9a13376ab6" [ 1023.567823] env[69994]: _type = "Task" [ 1023.567823] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.570702] env[69994]: DEBUG oslo_vmware.api [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925959, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.582015] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b328c6-2769-7076-c024-2a9a13376ab6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.694163] env[69994]: DEBUG nova.network.neutron [-] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.713086] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6270567d-bfda-48cb-ab64-da3d6e6cc04b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.728887] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645e9bc5-d2a1-4cb2-9a0d-65c8c9147c3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.746687] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925958, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070119} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.747599] env[69994]: DEBUG nova.network.neutron [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Successfully created port: 984ab9de-140f-4878-b423-5945bbb0353a {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1023.750367] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1023.751369] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a8636c-4cbf-48b1-bc4b-a3fdd37de37c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.772553] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 686feb53-00e2-43d9-b316-09c089df0891/686feb53-00e2-43d9-b316-09c089df0891.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.789197] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5978ca56-aff5-4280-b8f7-6ff161d4ca7c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.804299] env[69994]: DEBUG nova.compute.manager [req-a2f384fd-a369-400a-be7d-c30a8ffcd3d1 req-237e2a53-e926-4bfd-be61-414db23b9955 service nova] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Detach interface failed, port_id=b852a2dc-fa1a-43d5-95d9-9d982c5fbd9b, reason: Instance 660277f8-a7ff-43a9-8068-15e3db5a1069 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1023.810326] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1023.810326] env[69994]: value = "task-2925960" [ 1023.810326] env[69994]: _type = "Task" [ 1023.810326] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.818606] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925960, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.834091] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3b2ba9-b91d-4da8-9934-cf23eb8fcda1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.842638] env[69994]: DEBUG nova.compute.manager [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1023.850026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4881708d-ce8c-46e1-b6c6-c469aef8e864 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.879803] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61e8f91-e76b-4239-b7a3-6744ee139d1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.887450] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77723ee-11d9-4070-9893-7e31f2db875e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.903348] env[69994]: DEBUG nova.compute.provider_tree [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.962785] env[69994]: DEBUG nova.compute.manager [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1024.000761] env[69994]: DEBUG nova.network.neutron [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Successfully updated port: a44e6223-be47-4cd2-87c7-44a1fb78bc1c {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1024.075809] env[69994]: DEBUG oslo_vmware.api [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925959, 'name': ReconfigVM_Task, 'duration_secs': 0.492718} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.080258] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Reconfigured VM instance instance-00000051 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1024.087199] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-336d24a4-7e2e-4942-bc2b-af1537b0e7c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.103662] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b328c6-2769-7076-c024-2a9a13376ab6, 'name': SearchDatastore_Task, 'duration_secs': 0.022395} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.105152] env[69994]: DEBUG oslo_concurrency.lockutils [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.105443] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 1ff25686-e13e-4003-909b-18bf919aa20c/1ff25686-e13e-4003-909b-18bf919aa20c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1024.105767] env[69994]: DEBUG oslo_vmware.api [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1024.105767] env[69994]: value = "task-2925961" [ 1024.105767] env[69994]: _type = "Task" [ 1024.105767] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.105961] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2fc1320-bb6d-46f6-98f4-ee08a0ca71de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.117055] env[69994]: DEBUG oslo_vmware.api [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925961, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.118444] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1024.118444] env[69994]: value = "task-2925962" [ 1024.118444] env[69994]: _type = "Task" [ 1024.118444] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.127122] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925962, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.199053] env[69994]: INFO nova.compute.manager [-] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Took 1.70 seconds to deallocate network for instance. [ 1024.321670] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925960, 'name': ReconfigVM_Task, 'duration_secs': 0.498926} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.321923] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 686feb53-00e2-43d9-b316-09c089df0891/686feb53-00e2-43d9-b316-09c089df0891.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.322594] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b348cd11-11ff-4dd9-a276-c73d11232e5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.328771] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1024.328771] env[69994]: value = "task-2925963" [ 1024.328771] env[69994]: _type = "Task" [ 1024.328771] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.338256] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925963, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.406255] env[69994]: DEBUG nova.scheduler.client.report [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1024.481881] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.503570] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Acquiring lock "refresh_cache-93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.503760] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Acquired lock "refresh_cache-93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.503900] env[69994]: DEBUG nova.network.neutron [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1024.621039] env[69994]: DEBUG oslo_vmware.api [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925961, 'name': ReconfigVM_Task, 'duration_secs': 0.189805} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.625095] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587574', 'volume_id': 'dcf0e539-f80c-4035-8888-0a3f0ceb66f0', 'name': 'volume-dcf0e539-f80c-4035-8888-0a3f0ceb66f0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f946992b-faf2-4580-adcd-806d3b8fd104', 'attached_at': '', 'detached_at': '', 'volume_id': 'dcf0e539-f80c-4035-8888-0a3f0ceb66f0', 'serial': 'dcf0e539-f80c-4035-8888-0a3f0ceb66f0'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1024.625316] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1024.626716] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22249dfa-933b-4b7d-a564-43464b2353b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.634087] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1024.637344] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a32275a-eca2-4a19-be08-0bd42832c1bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.639374] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925962, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.704470] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.720634] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1024.720905] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1024.721257] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleting the datastore file [datastore2] f946992b-faf2-4580-adcd-806d3b8fd104 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1024.721589] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d04c6ce-8445-4e3a-a492-2d589743198f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.727980] env[69994]: DEBUG oslo_vmware.api [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1024.727980] env[69994]: value = "task-2925965" [ 1024.727980] env[69994]: _type = "Task" [ 1024.727980] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.736088] env[69994]: DEBUG oslo_vmware.api [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925965, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.839634] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925963, 'name': Rename_Task, 'duration_secs': 0.484882} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.840027] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1024.840244] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-019260ba-c38c-4700-906a-624b32e1ccc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.846409] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1024.846409] env[69994]: value = "task-2925966" [ 1024.846409] env[69994]: _type = "Task" [ 1024.846409] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.855431] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925966, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.857164] env[69994]: DEBUG nova.compute.manager [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1024.885301] env[69994]: DEBUG nova.virt.hardware [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1024.885641] env[69994]: DEBUG nova.virt.hardware [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1024.885860] env[69994]: DEBUG nova.virt.hardware [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1024.886126] env[69994]: DEBUG nova.virt.hardware [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1024.886349] env[69994]: DEBUG nova.virt.hardware [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1024.886558] env[69994]: DEBUG nova.virt.hardware [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1024.886956] env[69994]: DEBUG nova.virt.hardware [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1024.887217] env[69994]: DEBUG nova.virt.hardware [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1024.887440] env[69994]: DEBUG nova.virt.hardware [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 
tempest-ServersTestJSON-1703962916-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1024.887693] env[69994]: DEBUG nova.virt.hardware [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1024.887932] env[69994]: DEBUG nova.virt.hardware [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1024.889044] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb0b6c52-6902-4a23-b27a-6f493e10cb73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.897316] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143f83a1-c3b6-49e3-a693-e0fde0e0ef98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.911277] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.078s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.913963] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.432s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.936234] env[69994]: INFO nova.scheduler.client.report [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Deleted allocations for instance 29326ab7-2b4b-42af-a90c-e86510bcd443 [ 1025.036465] env[69994]: DEBUG nova.network.neutron [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1025.131044] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925962, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624314} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.131361] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 1ff25686-e13e-4003-909b-18bf919aa20c/1ff25686-e13e-4003-909b-18bf919aa20c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1025.131577] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1025.131838] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-afc21b39-ef36-4aa8-b1f8-7313966d8cf3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.139047] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1025.139047] env[69994]: value = "task-2925967" [ 1025.139047] env[69994]: _type = "Task" [ 1025.139047] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.149317] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925967, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.236944] env[69994]: DEBUG nova.compute.manager [req-a3ea99b2-a395-42e9-8094-850c0ed110c0 req-64f74107-ad0b-4aa1-a328-cae4dbf837f1 service nova] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Received event network-changed-a44e6223-be47-4cd2-87c7-44a1fb78bc1c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1025.237280] env[69994]: DEBUG nova.compute.manager [req-a3ea99b2-a395-42e9-8094-850c0ed110c0 req-64f74107-ad0b-4aa1-a328-cae4dbf837f1 service nova] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Refreshing instance network info cache due to event network-changed-a44e6223-be47-4cd2-87c7-44a1fb78bc1c. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1025.237551] env[69994]: DEBUG oslo_concurrency.lockutils [req-a3ea99b2-a395-42e9-8094-850c0ed110c0 req-64f74107-ad0b-4aa1-a328-cae4dbf837f1 service nova] Acquiring lock "refresh_cache-93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.244110] env[69994]: DEBUG oslo_vmware.api [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2925965, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276496} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.244383] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1025.244601] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1025.244806] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1025.244995] env[69994]: INFO nova.compute.manager [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Took 2.36 seconds to destroy the instance on the hypervisor. [ 1025.245269] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1025.246248] env[69994]: DEBUG nova.network.neutron [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Updating instance_info_cache with network_info: [{"id": "a44e6223-be47-4cd2-87c7-44a1fb78bc1c", "address": "fa:16:3e:31:6d:dd", "network": {"id": "685ad768-6fc8-43a6-ae72-c6239db7bd33", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1387544906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d78573882440eca91629924f6aa7c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa44e6223-be", "ovs_interfaceid": "a44e6223-be47-4cd2-87c7-44a1fb78bc1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.247382] env[69994]: DEBUG nova.compute.manager [-] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1025.247479] env[69994]: DEBUG nova.network.neutron [-] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1025.360070] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925966, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.418936] env[69994]: INFO nova.compute.claims [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1025.445214] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3d384663-2c10-40df-abe2-1175a9f1712a tempest-InstanceActionsV221TestJSON-1176554191 tempest-InstanceActionsV221TestJSON-1176554191-project-member] Lock "29326ab7-2b4b-42af-a90c-e86510bcd443" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.597s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.468966] env[69994]: DEBUG nova.compute.manager [req-add3ec33-21b3-4254-85b8-a602fa138747 req-4a04c660-735c-41f4-b701-ca6a44e08853 service nova] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Received event network-vif-plugged-984ab9de-140f-4878-b423-5945bbb0353a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1025.469208] env[69994]: DEBUG oslo_concurrency.lockutils [req-add3ec33-21b3-4254-85b8-a602fa138747 req-4a04c660-735c-41f4-b701-ca6a44e08853 service nova] Acquiring lock "d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.469426] env[69994]: DEBUG oslo_concurrency.lockutils [req-add3ec33-21b3-4254-85b8-a602fa138747 req-4a04c660-735c-41f4-b701-ca6a44e08853 service nova] Lock "d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.469593] env[69994]: DEBUG oslo_concurrency.lockutils [req-add3ec33-21b3-4254-85b8-a602fa138747 req-4a04c660-735c-41f4-b701-ca6a44e08853 service nova] Lock "d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.469867] env[69994]: DEBUG nova.compute.manager [req-add3ec33-21b3-4254-85b8-a602fa138747 req-4a04c660-735c-41f4-b701-ca6a44e08853 service nova] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] No waiting events found dispatching network-vif-plugged-984ab9de-140f-4878-b423-5945bbb0353a {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1025.469999] env[69994]: WARNING nova.compute.manager [req-add3ec33-21b3-4254-85b8-a602fa138747 req-4a04c660-735c-41f4-b701-ca6a44e08853 service nova] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Received unexpected event network-vif-plugged-984ab9de-140f-4878-b423-5945bbb0353a for instance with vm_state building and task_state spawning. [ 1025.650992] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925967, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074319} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.651441] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1025.652265] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a2a572-c632-4eff-9acc-5aef7f184251 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.674964] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 1ff25686-e13e-4003-909b-18bf919aa20c/1ff25686-e13e-4003-909b-18bf919aa20c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1025.675346] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed70bdc5-d2cf-4bf1-b1a8-263d3b7b8b04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.695445] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1025.695445] env[69994]: value = "task-2925968" [ 1025.695445] env[69994]: _type = "Task" [ 1025.695445] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.704516] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925968, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.750716] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Releasing lock "refresh_cache-93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.751324] env[69994]: DEBUG nova.compute.manager [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Instance network_info: |[{"id": "a44e6223-be47-4cd2-87c7-44a1fb78bc1c", "address": "fa:16:3e:31:6d:dd", "network": {"id": "685ad768-6fc8-43a6-ae72-c6239db7bd33", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1387544906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d78573882440eca91629924f6aa7c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa44e6223-be", "ovs_interfaceid": "a44e6223-be47-4cd2-87c7-44a1fb78bc1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1025.751450] env[69994]: DEBUG oslo_concurrency.lockutils [req-a3ea99b2-a395-42e9-8094-850c0ed110c0 req-64f74107-ad0b-4aa1-a328-cae4dbf837f1 service nova] Acquired lock "refresh_cache-93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.751553] env[69994]: DEBUG nova.network.neutron [req-a3ea99b2-a395-42e9-8094-850c0ed110c0 req-64f74107-ad0b-4aa1-a328-cae4dbf837f1 service nova] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Refreshing network info cache for port a44e6223-be47-4cd2-87c7-44a1fb78bc1c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1025.752859] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:6d:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a44e6223-be47-4cd2-87c7-44a1fb78bc1c', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1025.761914] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 
tempest-ServerPasswordTestJSON-1707718776-project-member] Creating folder: Project (23d78573882440eca91629924f6aa7c6). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1025.765594] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98df42d6-3901-4f70-b559-0ac892ff60fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.777125] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Created folder: Project (23d78573882440eca91629924f6aa7c6) in parent group-v587342. [ 1025.777550] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Creating folder: Instances. Parent ref: group-v587581. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1025.777976] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3e082ba-9bd0-481d-a885-b77bf324e7cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.787317] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Created folder: Instances in parent group-v587581. [ 1025.787456] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1025.788029] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1025.788029] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1631243-7fc3-488c-82a4-61fbc9e95ef8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.808876] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1025.808876] env[69994]: value = "task-2925971" [ 1025.808876] env[69994]: _type = "Task" [ 1025.808876] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.817226] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925971, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.856973] env[69994]: DEBUG oslo_vmware.api [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925966, 'name': PowerOnVM_Task, 'duration_secs': 0.731165} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.859562] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1025.859802] env[69994]: INFO nova.compute.manager [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Took 5.87 seconds to spawn the instance on the hypervisor. [ 1025.859991] env[69994]: DEBUG nova.compute.manager [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1025.860963] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de708e7-8703-4333-a17a-86b946264b22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.926142] env[69994]: INFO nova.compute.resource_tracker [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating resource usage from migration 19953643-7357-4d7b-9f22-c7785db7cea6 [ 1026.025743] env[69994]: DEBUG nova.network.neutron [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Successfully updated port: 984ab9de-140f-4878-b423-5945bbb0353a {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1026.030216] env[69994]: DEBUG nova.network.neutron [req-a3ea99b2-a395-42e9-8094-850c0ed110c0 req-64f74107-ad0b-4aa1-a328-cae4dbf837f1 service nova] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Updated VIF entry in instance network info cache for port a44e6223-be47-4cd2-87c7-44a1fb78bc1c. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1026.030701] env[69994]: DEBUG nova.network.neutron [req-a3ea99b2-a395-42e9-8094-850c0ed110c0 req-64f74107-ad0b-4aa1-a328-cae4dbf837f1 service nova] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Updating instance_info_cache with network_info: [{"id": "a44e6223-be47-4cd2-87c7-44a1fb78bc1c", "address": "fa:16:3e:31:6d:dd", "network": {"id": "685ad768-6fc8-43a6-ae72-c6239db7bd33", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1387544906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23d78573882440eca91629924f6aa7c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa44e6223-be", "ovs_interfaceid": "a44e6223-be47-4cd2-87c7-44a1fb78bc1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.119313] env[69994]: DEBUG nova.network.neutron [-] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.952377] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "refresh_cache-d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.952377] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "refresh_cache-d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.952377] env[69994]: DEBUG nova.network.neutron [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1026.952377] env[69994]: DEBUG oslo_concurrency.lockutils [req-a3ea99b2-a395-42e9-8094-850c0ed110c0 req-64f74107-ad0b-4aa1-a328-cae4dbf837f1 service nova] Releasing lock "refresh_cache-93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.952377] env[69994]: INFO nova.compute.manager [-] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Took 1.70 seconds to deallocate network for instance. 
[ 1026.966098] env[69994]: INFO nova.compute.manager [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Took 28.91 seconds to build instance. [ 1026.975783] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925968, 'name': ReconfigVM_Task, 'duration_secs': 0.590005} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.981662] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 1ff25686-e13e-4003-909b-18bf919aa20c/1ff25686-e13e-4003-909b-18bf919aa20c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1026.982317] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925971, 'name': CreateVM_Task, 'duration_secs': 0.520622} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.983355] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f0ea3e03-c32e-472f-96eb-bb59d8b703cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.984869] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1026.986296] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.986296] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.986510] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1026.987108] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3202059-03e0-4b3b-b1e5-63c69b887bd2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.992380] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 
tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1026.992380] env[69994]: value = "task-2925972" [ 1026.992380] env[69994]: _type = "Task" [ 1026.992380] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.993788] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Waiting for the task: (returnval){ [ 1026.993788] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529100a4-00c6-adc2-1e3a-0f2eb48661fc" [ 1026.993788] env[69994]: _type = "Task" [ 1026.993788] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.006559] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925972, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.009365] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529100a4-00c6-adc2-1e3a-0f2eb48661fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.057021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45beeae-fc55-42ea-a4d0-dfbe64c17324 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.066024] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8ec0ee-0cd5-47fa-a13d-db165ce06452 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.094273] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759183ee-c106-492b-9606-e69e8e81be86 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.102138] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb241906-9e7a-459f-95fd-5397fe45cedf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.117787] env[69994]: DEBUG nova.compute.provider_tree [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1027.261339] 
env[69994]: DEBUG nova.compute.manager [req-206bff56-eeaf-4c0d-93b6-e4a208808523 req-9f348aa0-f1ce-42d0-a170-6652cd55644a service nova] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Received event network-vif-deleted-fcbc6909-efc0-4ccc-8b55-763a5b3a9c73 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1027.261339] env[69994]: DEBUG nova.compute.manager [req-206bff56-eeaf-4c0d-93b6-e4a208808523 req-9f348aa0-f1ce-42d0-a170-6652cd55644a service nova] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Received event network-changed-984ab9de-140f-4878-b423-5945bbb0353a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1027.261339] env[69994]: DEBUG nova.compute.manager [req-206bff56-eeaf-4c0d-93b6-e4a208808523 req-9f348aa0-f1ce-42d0-a170-6652cd55644a service nova] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Refreshing instance network info cache due to event network-changed-984ab9de-140f-4878-b423-5945bbb0353a. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1027.261339] env[69994]: DEBUG oslo_concurrency.lockutils [req-206bff56-eeaf-4c0d-93b6-e4a208808523 req-9f348aa0-f1ce-42d0-a170-6652cd55644a service nova] Acquiring lock "refresh_cache-d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.458448] env[69994]: DEBUG nova.compute.manager [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1027.459823] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e5a439-29b1-4a05-9bea-9cf044bd2fee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.466966] env[69994]: INFO nova.compute.manager [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Rebuilding instance [ 1027.470908] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f52a7cf6-7fe0-4db1-859c-aabf451c0646 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Lock "686feb53-00e2-43d9-b316-09c089df0891" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.426s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.505873] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925972, 'name': Rename_Task, 'duration_secs': 0.225177} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.506902] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1027.507177] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46a1552e-2ac7-42c3-a4de-df566bb4296f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.514471] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529100a4-00c6-adc2-1e3a-0f2eb48661fc, 'name': SearchDatastore_Task, 'duration_secs': 0.022442} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.516799] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.517190] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1027.517293] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.518040] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.518040] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.518221] env[69994]: DEBUG nova.compute.manager [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Checking state {{(pid=69994) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1798}} [ 1027.518338] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5efe6114-0394-4e87-af45-647d2b386484 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.520991] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1746f1c8-c919-44ea-80ac-a518bb6ecd3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.524335] env[69994]: DEBUG nova.network.neutron [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1027.527518] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1027.527518] env[69994]: value = "task-2925973" [ 1027.527518] env[69994]: _type = "Task" [ 1027.527518] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.536680] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.536866] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1027.537828] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32923792-9d75-4dac-b584-cb70dfeacfc0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.543486] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925973, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.545059] env[69994]: INFO nova.compute.manager [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Took 0.58 seconds to detach 1 volumes for instance. [ 1027.550408] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Waiting for the task: (returnval){ [ 1027.550408] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d0c4f1-596f-38ee-0f95-a183dff80644" [ 1027.550408] env[69994]: _type = "Task" [ 1027.550408] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.561375] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d0c4f1-596f-38ee-0f95-a183dff80644, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.640715] env[69994]: ERROR nova.scheduler.client.report [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [req-3d55e616-3b21-40ac-9dad-4c7a37b14712] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3d55e616-3b21-40ac-9dad-4c7a37b14712"}]} [ 1027.662033] env[69994]: DEBUG nova.scheduler.client.report [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1027.678301] env[69994]: DEBUG nova.scheduler.client.report [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1027.678982] env[69994]: DEBUG nova.compute.provider_tree [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1027.691261] env[69994]: DEBUG nova.scheduler.client.report [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 
tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1027.712277] env[69994]: DEBUG nova.scheduler.client.report [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1027.732096] env[69994]: DEBUG nova.network.neutron [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Updating instance_info_cache with network_info: [{"id": "984ab9de-140f-4878-b423-5945bbb0353a", "address": "fa:16:3e:11:2d:30", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap984ab9de-14", "ovs_interfaceid": "984ab9de-140f-4878-b423-5945bbb0353a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.855295] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5212fbcc-1783-cb51-08f5-a61f3d028b9f/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1027.856228] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8ee502-aeef-4805-8ae5-15f97d25190e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.865615] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5212fbcc-1783-cb51-08f5-a61f3d028b9f/disk-0.vmdk is in state: ready. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1027.865824] env[69994]: ERROR oslo_vmware.rw_handles [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5212fbcc-1783-cb51-08f5-a61f3d028b9f/disk-0.vmdk due to incomplete transfer. [ 1027.866180] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-62daca5b-a292-4654-85cc-c5cc7ff6acc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.875213] env[69994]: DEBUG oslo_vmware.rw_handles [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5212fbcc-1783-cb51-08f5-a61f3d028b9f/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1027.875417] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Uploaded image f3f8b9e0-bc3e-4bd4-800b-f48f29e25b1e to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1027.877998] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1027.878281] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6cf47e95-6a64-400b-8777-2a75a8ca3eee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.886957] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 1027.886957] env[69994]: value = "task-2925974" [ 1027.886957] env[69994]: _type = "Task" [ 1027.886957] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.896019] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925974, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.970544] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b680ea-0cb8-496d-80c5-9096df5dcb17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.974201] env[69994]: INFO nova.compute.manager [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] instance snapshotting [ 1027.974727] env[69994]: DEBUG nova.objects.instance [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'flavor' on Instance uuid f0b77732-aae1-4790-a2c7-75586e78eda6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1027.980782] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436fb1bf-0b8b-4cc9-abb0-7115ae510659 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.014689] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2896f4-fd2b-45d6-884e-c6ea011a3a6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.022473] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2af835-4f05-456c-aacd-91971853e93b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.042013] env[69994]: DEBUG nova.compute.provider_tree [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1028.049637] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925973, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.057232] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.061888] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d0c4f1-596f-38ee-0f95-a183dff80644, 'name': SearchDatastore_Task, 'duration_secs': 0.01814} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.062674] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04243513-9929-4b08-b58e-06e832bf89f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.067934] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Waiting for the task: (returnval){ [ 1028.067934] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52403a1e-1b7c-305f-57f5-c731d4878bde" [ 1028.067934] env[69994]: _type = "Task" [ 1028.067934] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.077337] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52403a1e-1b7c-305f-57f5-c731d4878bde, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.234563] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "refresh_cache-d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.234999] env[69994]: DEBUG nova.compute.manager [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Instance network_info: |[{"id": "984ab9de-140f-4878-b423-5945bbb0353a", "address": "fa:16:3e:11:2d:30", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap984ab9de-14", "ovs_interfaceid": "984ab9de-140f-4878-b423-5945bbb0353a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1028.235512] env[69994]: DEBUG oslo_concurrency.lockutils [req-206bff56-eeaf-4c0d-93b6-e4a208808523 req-9f348aa0-f1ce-42d0-a170-6652cd55644a service nova] Acquired lock "refresh_cache-d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.235664] env[69994]: DEBUG nova.network.neutron [req-206bff56-eeaf-4c0d-93b6-e4a208808523 req-9f348aa0-f1ce-42d0-a170-6652cd55644a service nova] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Refreshing network info cache for port 984ab9de-140f-4878-b423-5945bbb0353a {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1028.237017] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:2d:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '984ab9de-140f-4878-b423-5945bbb0353a', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1028.246020] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1028.246131] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1028.246384] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-740440c7-ff18-45e2-b169-70b566955ee5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.270291] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1028.270291] env[69994]: value = "task-2925975" [ 1028.270291] env[69994]: _type = "Task" [ 1028.270291] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.283372] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925975, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.401510] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925974, 'name': Destroy_Task, 'duration_secs': 0.3305} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.401614] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Destroyed the VM [ 1028.401887] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1028.402389] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6aeb05c0-5359-4bbf-9876-357cb22da34f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.410764] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 1028.410764] env[69994]: value = "task-2925976" [ 1028.410764] env[69994]: _type = "Task" [ 1028.410764] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.419777] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925976, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.482092] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcccfd26-d9d4-4bb8-a456-65199395a54d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.502238] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf64b31-a16a-4da7-bf3e-1f35e93ffee9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.540233] env[69994]: DEBUG oslo_vmware.api [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925973, 'name': PowerOnVM_Task, 'duration_secs': 0.658614} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.540587] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1028.540876] env[69994]: DEBUG nova.compute.manager [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1028.541889] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9f76cf-0ca6-484f-bcc4-71f4d9fb50d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.544835] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1028.545097] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c31bd26c-fdae-4047-ab61-cf252cae9f16 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.556196] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1028.556196] env[69994]: value = "task-2925977" [ 1028.556196] env[69994]: _type = "Task" [ 1028.556196] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.565282] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925977, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.578837] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52403a1e-1b7c-305f-57f5-c731d4878bde, 'name': SearchDatastore_Task, 'duration_secs': 0.017321} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.579142] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.579423] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3/93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1028.579701] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b023ac9-eab8-484e-8425-2d876b41ce80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.588025] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Waiting for the task: (returnval){ [ 1028.588025] env[69994]: value = "task-2925978" [ 1028.588025] env[69994]: _type = "Task" [ 1028.588025] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.592783] env[69994]: DEBUG nova.scheduler.client.report [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 122 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1028.593118] env[69994]: DEBUG nova.compute.provider_tree [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 122 to 123 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1028.593344] env[69994]: DEBUG nova.compute.provider_tree [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1028.602296] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925978, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.782996] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925975, 'name': CreateVM_Task, 'duration_secs': 0.383749} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.783241] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1028.784384] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.784617] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.784954] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1028.785372] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14c20f45-626e-4ab2-8e01-d6d6b92eb49d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.790872] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1028.790872] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526c1a71-1dbf-b1b6-9446-f9edcd31883e" [ 1028.790872] env[69994]: _type = "Task" [ 1028.790872] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.799812] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526c1a71-1dbf-b1b6-9446-f9edcd31883e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.921708] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925976, 'name': RemoveSnapshot_Task} progress is 62%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.013087] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1029.013428] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-af4f0319-9455-4109-bda6-b62003b14348 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.023101] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1029.023101] env[69994]: value = "task-2925979" [ 1029.023101] env[69994]: _type = "Task" [ 1029.023101] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.035615] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925979, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.068917] env[69994]: DEBUG oslo_concurrency.lockutils [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.074184] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925977, 'name': PowerOffVM_Task, 'duration_secs': 0.384708} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.077329] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.078133] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1029.079070] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23fe8fcf-401d-4b12-8bed-f80cf214e975 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.092411] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1029.096408] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2344e108-9e82-4417-a2f7-74f6c5c9e9a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.098844] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 4.185s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.099140] env[69994]: INFO nova.compute.manager [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Migrating [ 1029.109676] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.405s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.109937] env[69994]: DEBUG nova.objects.instance [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Lazy-loading 'resources' on Instance uuid 660277f8-a7ff-43a9-8068-15e3db5a1069 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1029.127701] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925978, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.136613] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.136851] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.137208] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Deleting the datastore file [datastore2] 686feb53-00e2-43d9-b316-09c089df0891 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.137542] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0962c83-869d-4b65-8738-b2e8dd894e25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.146042] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1029.146042] env[69994]: value = "task-2925981" [ 1029.146042] env[69994]: _type = "Task" [ 1029.146042] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.155855] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925981, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.168831] env[69994]: DEBUG nova.network.neutron [req-206bff56-eeaf-4c0d-93b6-e4a208808523 req-9f348aa0-f1ce-42d0-a170-6652cd55644a service nova] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Updated VIF entry in instance network info cache for port 984ab9de-140f-4878-b423-5945bbb0353a. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1029.169300] env[69994]: DEBUG nova.network.neutron [req-206bff56-eeaf-4c0d-93b6-e4a208808523 req-9f348aa0-f1ce-42d0-a170-6652cd55644a service nova] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Updating instance_info_cache with network_info: [{"id": "984ab9de-140f-4878-b423-5945bbb0353a", "address": "fa:16:3e:11:2d:30", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap984ab9de-14", "ovs_interfaceid": "984ab9de-140f-4878-b423-5945bbb0353a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.303111] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526c1a71-1dbf-b1b6-9446-f9edcd31883e, 'name': SearchDatastore_Task, 'duration_secs': 0.020989} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.303444] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.303679] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1029.303920] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.304081] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.304375] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1029.304533] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00b1ed7d-d57e-42d0-b7a1-35f55292c5dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.313737] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1029.313928] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1029.314687] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6e1583c-ec65-4c67-ae84-c13aa3737fac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.320932] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1029.320932] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b97dea-9c19-3de6-28d4-e774cabf033c" [ 1029.320932] env[69994]: _type = "Task" [ 1029.320932] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.329350] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b97dea-9c19-3de6-28d4-e774cabf033c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.423610] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925976, 'name': RemoveSnapshot_Task} progress is 62%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.535643] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925979, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.599949] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925978, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.737471} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.600554] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3/93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1029.600554] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1029.600834] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a17a50a-94ff-419c-99ac-d6de4cd4a51f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.607854] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Waiting for the task: (returnval){ [ 1029.607854] env[69994]: value = "task-2925982" [ 1029.607854] env[69994]: _type = "Task" [ 1029.607854] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.618187] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925982, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.627747] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.627747] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.627747] env[69994]: DEBUG nova.network.neutron [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1029.659759] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925981, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.346797} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.662757] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1029.662870] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1029.663307] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1029.671551] env[69994]: DEBUG oslo_concurrency.lockutils [req-206bff56-eeaf-4c0d-93b6-e4a208808523 req-9f348aa0-f1ce-42d0-a170-6652cd55644a service nova] Releasing lock "refresh_cache-d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.757263] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquiring lock "1ff25686-e13e-4003-909b-18bf919aa20c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.757519] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Lock "1ff25686-e13e-4003-909b-18bf919aa20c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.757748] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquiring lock "1ff25686-e13e-4003-909b-18bf919aa20c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.757943] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Lock "1ff25686-e13e-4003-909b-18bf919aa20c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.758305] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Lock "1ff25686-e13e-4003-909b-18bf919aa20c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.760668] env[69994]: INFO nova.compute.manager [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Terminating instance [ 1029.835304] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b97dea-9c19-3de6-28d4-e774cabf033c, 'name': SearchDatastore_Task, 'duration_secs': 0.024591} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.842130] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3333b021-740a-4ef5-94a5-98ab7d8b93ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.851904] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1029.851904] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526d5b04-a7e8-de8c-d04a-bf9c6317a96c" [ 1029.851904] env[69994]: _type = "Task" [ 1029.851904] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.868023] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526d5b04-a7e8-de8c-d04a-bf9c6317a96c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.922802] env[69994]: DEBUG oslo_vmware.api [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925976, 'name': RemoveSnapshot_Task, 'duration_secs': 1.124016} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.923122] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1029.923418] env[69994]: INFO nova.compute.manager [None req-9bf770cc-7a6d-45ce-ae21-4da82053a78a tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Took 14.07 seconds to snapshot the instance on the hypervisor. [ 1029.931021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6b5f14-fdb3-4025-aeb8-0f1c84579fd0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.938874] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78376a2-e3d1-436a-bcf3-afbed41bcef3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.973220] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40f7461-f736-49be-a778-959396d0304c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.981348] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0715eda6-a02e-44c5-a3f9-51b38f93f65b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.996320] env[69994]: DEBUG nova.compute.provider_tree [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.034665] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925979, 'name': CreateSnapshot_Task, 'duration_secs': 0.570969} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.034975] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1030.035664] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a308549-0d02-41b0-888c-b84e16a4f552 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.117929] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925982, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.314911} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.118365] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1030.119015] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6336547-6eae-47f5-802d-378d8a994c07 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.144412] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3/93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1030.144412] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de722031-5510-4499-b7f1-37de6f4a7fd4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.167038] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Waiting for the task: (returnval){ [ 1030.167038] env[69994]: value = "task-2925983" [ 1030.167038] env[69994]: _type = "Task" [ 1030.167038] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.178694] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925983, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.264493] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquiring lock "refresh_cache-1ff25686-e13e-4003-909b-18bf919aa20c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.264872] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquired lock "refresh_cache-1ff25686-e13e-4003-909b-18bf919aa20c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.265123] env[69994]: DEBUG nova.network.neutron [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1030.363888] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526d5b04-a7e8-de8c-d04a-bf9c6317a96c, 'name': SearchDatastore_Task, 'duration_secs': 0.046455} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.365315] env[69994]: DEBUG nova.network.neutron [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating instance_info_cache with network_info: [{"id": "ad28c14f-638f-4073-b494-cb6a2a579dab", "address": "fa:16:3e:60:ef:9a", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad28c14f-63", "ovs_interfaceid": "ad28c14f-638f-4073-b494-cb6a2a579dab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.369809] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore2] 
devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.369809] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e/d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1030.369809] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72e6ad09-d443-4c5f-ba15-d11c463eaf2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.374853] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1030.374853] env[69994]: value = "task-2925984" [ 1030.374853] env[69994]: _type = "Task" [ 1030.374853] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.385998] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925984, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.503489] env[69994]: DEBUG nova.scheduler.client.report [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1030.557711] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1030.558909] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e09df91b-3830-4f48-8ff5-dc3ff6ab18dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.572345] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1030.572345] env[69994]: value = "task-2925985" [ 1030.572345] env[69994]: _type = 
"Task" [ 1030.572345] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.584096] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925985, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.687020] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925983, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.715605] env[69994]: DEBUG nova.virt.hardware [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1030.715952] env[69994]: DEBUG nova.virt.hardware [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1030.716157] env[69994]: DEBUG nova.virt.hardware [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1030.716353] env[69994]: DEBUG nova.virt.hardware [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1030.716497] env[69994]: DEBUG nova.virt.hardware [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1030.716678] env[69994]: DEBUG nova.virt.hardware [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1030.716866] env[69994]: DEBUG nova.virt.hardware [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1030.716989] env[69994]: DEBUG nova.virt.hardware [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1030.717172] env[69994]: DEBUG nova.virt.hardware [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1030.717333] env[69994]: DEBUG nova.virt.hardware [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1030.717593] env[69994]: DEBUG nova.virt.hardware [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1030.718515] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8109b7c9-a0c4-45bc-94ae-1b8d9e4c4620 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.726974] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95e7102-e648-4a0a-b938-35f351657c18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.740447] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1030.746086] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1030.746403] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1030.746628] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ddbd2176-9692-4581-be8e-24b75dfafbbb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.764145] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1030.764145] env[69994]: value = "task-2925986" [ 1030.764145] env[69994]: _type = "Task" [ 1030.764145] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.773976] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925986, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.796216] env[69994]: DEBUG nova.network.neutron [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1030.869120] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.874425] env[69994]: DEBUG nova.network.neutron [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.894198] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925984, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.010009] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.900s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.014398] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.957s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.015076] env[69994]: DEBUG nova.objects.instance [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lazy-loading 'resources' on Instance uuid f946992b-faf2-4580-adcd-806d3b8fd104 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.055834] env[69994]: INFO nova.scheduler.client.report [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Deleted allocations for instance 660277f8-a7ff-43a9-8068-15e3db5a1069 [ 1031.086871] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925985, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.186179] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925983, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.275823] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925986, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.382293] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Releasing lock "refresh_cache-1ff25686-e13e-4003-909b-18bf919aa20c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1031.382293] env[69994]: DEBUG nova.compute.manager [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1031.382293] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1031.383922] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136ae0f4-006d-44ab-b1a7-395dc3ce82db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.396233] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925984, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.774891} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.399383] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e/d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1031.399383] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1031.399383] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1031.399383] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d9420ec-c691-4200-9354-2cf78c6cc8e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.402548] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed936af2-377b-407f-ad0e-c1c6d3cf7d96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.411713] env[69994]: DEBUG oslo_vmware.api [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1031.411713] env[69994]: value = "task-2925988" [ 1031.411713] env[69994]: _type = "Task" [ 1031.411713] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.411999] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1031.411999] env[69994]: value = "task-2925987" [ 1031.411999] env[69994]: _type = "Task" [ 1031.411999] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.425790] env[69994]: DEBUG oslo_vmware.api [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925988, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.429221] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925987, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.568622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e5882724-704e-4dc4-9fe6-c5ad1962945c tempest-ServersV294TestFqdnHostnames-1345478198 tempest-ServersV294TestFqdnHostnames-1345478198-project-member] Lock "660277f8-a7ff-43a9-8068-15e3db5a1069" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.730s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.582165] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925985, 'name': CloneVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.679830] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925983, 'name': ReconfigVM_Task, 'duration_secs': 1.029138} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.680277] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3/93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1031.681073] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aca3c900-9127-4589-9c1a-762b8689ffb7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.689871] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Waiting for the task: (returnval){ [ 1031.689871] env[69994]: value = "task-2925989" [ 1031.689871] env[69994]: _type = "Task" [ 1031.689871] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.698272] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925989, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.778598] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2925986, 'name': CreateVM_Task, 'duration_secs': 0.566375} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.781191] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1031.781953] env[69994]: DEBUG oslo_concurrency.lockutils [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.782152] env[69994]: DEBUG oslo_concurrency.lockutils [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.782483] env[69994]: DEBUG oslo_concurrency.lockutils [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1031.782768] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-517a211f-8c04-47c1-820c-718a1b916d7c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.788956] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1031.788956] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b8d1a9-bc4f-f1f7-5306-81b0ad98ed44" [ 1031.788956] env[69994]: _type = "Task" [ 1031.788956] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.798623] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b8d1a9-bc4f-f1f7-5306-81b0ad98ed44, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.840340] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa58bcff-07fb-4a87-9c17-dfc8b50a412e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.847869] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf6f4af-c791-4a51-9513-42d1b6d68a1b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.856051] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "4d415c4d-54b2-4324-8e98-9dc476960348" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.856369] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "4d415c4d-54b2-4324-8e98-9dc476960348" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.896624] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18e10cc-f3a7-4285-94fb-51cf16afb7b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.899637] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "6c81eb8b-78d7-469d-8076-13d8a8f61fec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.899877] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "6c81eb8b-78d7-469d-8076-13d8a8f61fec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.911048] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af5f965-14cb-48d3-85e9-979bc0af14d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.916565] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "922799c0-707c-4f4e-a54c-f015eab0a8d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.916814] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "922799c0-707c-4f4e-a54c-f015eab0a8d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.936346] env[69994]: DEBUG nova.compute.provider_tree [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.944094] env[69994]: DEBUG oslo_vmware.api [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925988, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.944370] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925987, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115391} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.945244] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1031.946986] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69d88ae-4987-49b5-92d8-42f56eee2af1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.971279] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e/d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1031.971903] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d20cb326-f111-45d3-8db2-9aec334116c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.992924] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1031.992924] env[69994]: value = "task-2925990" [ 1031.992924] env[69994]: _type = "Task" [ 1031.992924] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.002093] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925990, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.081411] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2925985, 'name': CloneVM_Task, 'duration_secs': 1.328272} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.081712] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Created linked-clone VM from snapshot [ 1032.082437] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5411f7a-efef-4160-a7e8-4fecc79f2f1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.090143] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Uploading image c18ad5ff-2d6b-46c7-9cf7-a9e4b3b5d13f {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1032.113458] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1032.113458] env[69994]: value = "vm-587586" [ 1032.113458] env[69994]: _type = "VirtualMachine" [ 1032.113458] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1032.113794] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d2ca8a0a-b5c8-45b3-b9ec-c1f2faada4f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.121692] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lease: (returnval){ [ 1032.121692] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52282dfb-f62f-c55a-9c58-ca73f5cab347" [ 1032.121692] env[69994]: _type = "HttpNfcLease" [ 1032.121692] env[69994]: } obtained for exporting VM: (result){ [ 1032.121692] env[69994]: value = "vm-587586" [ 1032.121692] env[69994]: _type = "VirtualMachine" [ 1032.121692] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1032.121692] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the lease: (returnval){ [ 1032.121692] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52282dfb-f62f-c55a-9c58-ca73f5cab347" [ 1032.121692] env[69994]: _type = "HttpNfcLease" [ 1032.121692] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1032.127619] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1032.127619] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52282dfb-f62f-c55a-9c58-ca73f5cab347" [ 1032.127619] env[69994]: _type = "HttpNfcLease" [ 1032.127619] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1032.199650] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925989, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.304190] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b8d1a9-bc4f-f1f7-5306-81b0ad98ed44, 'name': SearchDatastore_Task, 'duration_secs': 0.019643} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.304457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.304630] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1032.304868] env[69994]: DEBUG oslo_concurrency.lockutils [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.305016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.305286] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1032.305655] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0feba4df-6d3c-437c-a915-27d518c66236 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.314349] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1032.314481] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1032.318367] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34770e53-4df2-4d71-b724-5597af7421eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.323930] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1032.323930] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b11160-a3bc-ab7d-d846-f6f01b60dd30" [ 1032.323930] env[69994]: _type = "Task" [ 1032.323930] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.332554] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b11160-a3bc-ab7d-d846-f6f01b60dd30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.360633] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1032.397785] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c214622-4449-47e0-90ed-e209c81c54a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.402818] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1032.427193] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1032.432567] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating instance '5b9648a7-f26f-4151-be5c-59991035a529' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1032.449029] env[69994]: DEBUG nova.scheduler.client.report [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1032.456019] env[69994]: DEBUG oslo_vmware.api [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925988, 'name': PowerOffVM_Task, 'duration_secs': 1.011322} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.456019] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1032.456019] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1032.456019] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0abb97d9-813f-43a4-8418-a859bc48bf8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.480843] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1032.480843] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1032.485017] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 
tempest-ServersListShow296Test-331668703-project-member] Deleting the datastore file [datastore2] 1ff25686-e13e-4003-909b-18bf919aa20c {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1032.485017] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebcf25d5-7a0f-40bf-a768-b4817cfa9b09 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.489475] env[69994]: DEBUG oslo_vmware.api [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for the task: (returnval){ [ 1032.489475] env[69994]: value = "task-2925993" [ 1032.489475] env[69994]: _type = "Task" [ 1032.489475] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.502233] env[69994]: DEBUG oslo_vmware.api [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925993, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.505387] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925990, 'name': ReconfigVM_Task, 'duration_secs': 0.28719} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.505679] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Reconfigured VM instance instance-00000057 to attach disk [datastore2] d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e/d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1032.506304] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-615da35e-0a38-46e9-b695-76cbd9249584 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.512184] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1032.512184] env[69994]: value = "task-2925994" [ 1032.512184] env[69994]: _type = "Task" [ 1032.512184] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.521201] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925994, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.633154] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1032.633154] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52282dfb-f62f-c55a-9c58-ca73f5cab347" [ 1032.633154] env[69994]: _type = "HttpNfcLease" [ 1032.633154] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1032.633154] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1032.633154] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52282dfb-f62f-c55a-9c58-ca73f5cab347" [ 1032.633154] env[69994]: _type = "HttpNfcLease" [ 1032.633154] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1032.633154] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558c1aae-ef19-470b-9529-3b3641cac73d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.642313] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525768f1-2d5f-7655-2235-ad286e326d98/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1032.642313] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525768f1-2d5f-7655-2235-ad286e326d98/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1032.718829] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925989, 'name': Rename_Task, 'duration_secs': 0.757067} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.719539] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1032.719905] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5702235d-6c1e-4b6c-a520-c74cd9c4d703 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.727243] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Waiting for the task: (returnval){ [ 1032.727243] env[69994]: value = "task-2925995" [ 1032.727243] env[69994]: _type = "Task" [ 1032.727243] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.737028] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925995, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.738742] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-73cac680-00c7-4bc3-9ced-b37362cfc73d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.833970] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b11160-a3bc-ab7d-d846-f6f01b60dd30, 'name': SearchDatastore_Task, 'duration_secs': 0.011523} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.834933] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78847a3e-e226-4550-8986-21d09522e307 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.840387] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1032.840387] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cb43f2-6f17-ddde-f355-346c94b5bf6f" [ 1032.840387] env[69994]: _type = "Task" [ 1032.840387] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.848609] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cb43f2-6f17-ddde-f355-346c94b5bf6f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.895555] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.928351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.937507] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1032.941726] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2de78f75-c114-4e33-bc37-23dc75e1564a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.951870] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1032.951870] env[69994]: value = "task-2925996" [ 1032.951870] env[69994]: _type = "Task" [ 1032.951870] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.958218] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.943s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.959419] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.959940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 3.891s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.960141] env[69994]: DEBUG nova.objects.instance [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1032.968622] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925996, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.986693] env[69994]: INFO nova.scheduler.client.report [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleted allocations for instance f946992b-faf2-4580-adcd-806d3b8fd104 [ 1033.000608] env[69994]: DEBUG oslo_vmware.api [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Task: {'id': task-2925993, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096669} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.001284] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1033.001692] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1033.001899] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1033.002087] env[69994]: INFO nova.compute.manager [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1033.002353] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1033.002723] env[69994]: DEBUG nova.compute.manager [-] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1033.002816] env[69994]: DEBUG nova.network.neutron [-] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1033.024224] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925994, 'name': Rename_Task, 'duration_secs': 0.150958} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.025157] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1033.025281] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b76a6b7-d6f6-4566-b6bf-b64bbe61ebab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.029389] env[69994]: DEBUG nova.network.neutron [-] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1033.033712] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1033.033712] env[69994]: value = "task-2925997" [ 1033.033712] env[69994]: _type = "Task" [ 1033.033712] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.044253] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925997, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.223761] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "c98308b3-2431-4f17-9022-bcd9f1e83a35" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.223761] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "c98308b3-2431-4f17-9022-bcd9f1e83a35" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.223761] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "c98308b3-2431-4f17-9022-bcd9f1e83a35-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.223761] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "c98308b3-2431-4f17-9022-bcd9f1e83a35-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.223761] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "c98308b3-2431-4f17-9022-bcd9f1e83a35-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.225308] env[69994]: INFO nova.compute.manager [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Terminating instance [ 1033.241219] env[69994]: DEBUG oslo_vmware.api [None 
req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925995, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.353203] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cb43f2-6f17-ddde-f355-346c94b5bf6f, 'name': SearchDatastore_Task, 'duration_secs': 0.00896} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.354797] env[69994]: DEBUG oslo_concurrency.lockutils [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.355392] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 686feb53-00e2-43d9-b316-09c089df0891/686feb53-00e2-43d9-b316-09c089df0891.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1033.355702] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39f5e67e-a2bc-4b2b-937c-af43139e0ede {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.363829] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1033.363829] env[69994]: value = "task-2925998" [ 1033.363829] env[69994]: _type = "Task" [ 1033.363829] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.375924] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.462727] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2925996, 'name': PowerOffVM_Task, 'duration_secs': 0.187032} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.463329] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1033.463677] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating instance '5b9648a7-f26f-4151-be5c-59991035a529' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1033.497098] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6702b10-31a1-437b-97e9-df639f1a021b tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "f946992b-faf2-4580-adcd-806d3b8fd104" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.122s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.536267] env[69994]: DEBUG nova.network.neutron [-] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.565294] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925997, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.736232] env[69994]: DEBUG nova.compute.manager [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1033.736625] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1033.741716] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02676a08-e73f-4641-9e15-9528298b3108 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.745334] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925995, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.750226] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1033.750533] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9c0dcb1-a43c-429e-ad2a-724fa9a0d122 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.758569] env[69994]: DEBUG oslo_vmware.api [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 1033.758569] env[69994]: value = "task-2925999" [ 1033.758569] env[69994]: _type = "Task" [ 1033.758569] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.768853] env[69994]: DEBUG oslo_vmware.api [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925999, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.881745] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925998, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.974310] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1033.974615] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.975956] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1033.976242] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.976465] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1033.976636] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1033.976863] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1033.977051] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1033.977233] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 
tempest-ServerDiskConfigTestJSON-696973435-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1033.977483] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1033.977698] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1033.986897] env[69994]: DEBUG oslo_concurrency.lockutils [None req-302cf856-2b49-450e-aa51-110d9423d167 tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.027s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.990404] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22255def-8d65-435c-885b-9196c78cf62a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.002677] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.107s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.004440] env[69994]: INFO nova.compute.claims [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1034.014732] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1034.014732] env[69994]: value = "task-2926000" [ 1034.014732] env[69994]: _type = "Task" [ 1034.014732] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.024796] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926000, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.045718] env[69994]: INFO nova.compute.manager [-] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Took 1.04 seconds to deallocate network for instance. 
[ 1034.061128] env[69994]: DEBUG oslo_concurrency.lockutils [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "5acdf02b-f61c-46ff-9c36-8e86b9be7738" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.064293] env[69994]: DEBUG oslo_concurrency.lockutils [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "5acdf02b-f61c-46ff-9c36-8e86b9be7738" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.004s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.064839] env[69994]: DEBUG oslo_concurrency.lockutils [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "5acdf02b-f61c-46ff-9c36-8e86b9be7738-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.065071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "5acdf02b-f61c-46ff-9c36-8e86b9be7738-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.065320] env[69994]: DEBUG oslo_concurrency.lockutils [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "5acdf02b-f61c-46ff-9c36-8e86b9be7738-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.069863] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925997, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.069863] env[69994]: INFO nova.compute.manager [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Terminating instance [ 1034.240721] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925995, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.268382] env[69994]: DEBUG oslo_vmware.api [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925999, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.375897] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2925998, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539001} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.376398] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 686feb53-00e2-43d9-b316-09c089df0891/686feb53-00e2-43d9-b316-09c089df0891.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1034.376727] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1034.377115] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ff947dc-51c8-43de-b932-14008071a7ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.384518] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1034.384518] env[69994]: value = "task-2926001" [ 1034.384518] env[69994]: _type = "Task" [ 1034.384518] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.394734] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926001, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.533057] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926000, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.553753] env[69994]: DEBUG oslo_vmware.api [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2925997, 'name': PowerOnVM_Task, 'duration_secs': 1.314894} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.554331] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1034.554781] env[69994]: INFO nova.compute.manager [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Took 9.70 seconds to spawn the instance on the hypervisor. [ 1034.554992] env[69994]: DEBUG nova.compute.manager [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1034.555934] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64eda1e9-1da6-4549-b936-2369ced4ba90 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.569674] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.572920] env[69994]: DEBUG nova.compute.manager [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1034.574266] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1034.574266] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e58fe3-1612-4749-9c2e-344bb953e9ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.582039] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1034.582827] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1735da35-4ff1-46ec-b4d3-ec5183fc56d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.591079] env[69994]: DEBUG oslo_vmware.api [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1034.591079] env[69994]: value = "task-2926002" [ 1034.591079] env[69994]: _type = "Task" [ 1034.591079] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.601465] env[69994]: DEBUG oslo_vmware.api [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926002, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.742161] env[69994]: DEBUG oslo_vmware.api [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2925995, 'name': PowerOnVM_Task, 'duration_secs': 1.613508} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.742569] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1034.742777] env[69994]: INFO nova.compute.manager [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Took 12.29 seconds to spawn the instance on the hypervisor. 
[ 1034.743056] env[69994]: DEBUG nova.compute.manager [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1034.743917] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad217bec-1d97-41d7-93df-386f5c3a9b5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.768555] env[69994]: DEBUG oslo_vmware.api [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2925999, 'name': PowerOffVM_Task, 'duration_secs': 0.708664} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.769896] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1034.769896] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1034.769896] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-64ae8e71-39c2-4220-8f17-191351c99d2c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.840540] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1034.840918] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1034.841852] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Deleting the datastore file [datastore2] c98308b3-2431-4f17-9022-bcd9f1e83a35 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1034.841852] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89d18f16-0f1f-4024-b44a-33037c384ceb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.848785] env[69994]: DEBUG oslo_vmware.api [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 
tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 1034.848785] env[69994]: value = "task-2926004" [ 1034.848785] env[69994]: _type = "Task" [ 1034.848785] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.858722] env[69994]: DEBUG oslo_vmware.api [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2926004, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.894311] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926001, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096693} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.894660] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1034.895607] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cda236b-e479-475f-ae08-16e7d04bc2ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.917118] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 686feb53-00e2-43d9-b316-09c089df0891/686feb53-00e2-43d9-b316-09c089df0891.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1034.917424] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bac40d1-a085-4d1c-bb01-825352f16e31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.939120] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1034.939120] env[69994]: value = "task-2926005" [ 1034.939120] env[69994]: _type = "Task" [ 1034.939120] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.950045] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926005, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.028514] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926000, 'name': ReconfigVM_Task, 'duration_secs': 0.561927} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.029046] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating instance '5b9648a7-f26f-4151-be5c-59991035a529' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1035.079195] env[69994]: INFO nova.compute.manager [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Took 28.36 seconds to build instance. [ 1035.098615] env[69994]: DEBUG oslo_vmware.api [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926002, 'name': PowerOffVM_Task, 'duration_secs': 0.336396} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.101925] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1035.101992] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1035.102994] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38b3ba0a-866e-47fe-a750-2bb88355b6f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.177233] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1035.177233] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1035.177496] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 
tempest-AttachVolumeNegativeTest-1428794158-project-member] Deleting the datastore file [datastore1] 5acdf02b-f61c-46ff-9c36-8e86b9be7738 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1035.177823] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4fef952e-298e-4bdb-af62-1d5e21f3733a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.192606] env[69994]: DEBUG oslo_vmware.api [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1035.192606] env[69994]: value = "task-2926007" [ 1035.192606] env[69994]: _type = "Task" [ 1035.192606] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.204014] env[69994]: DEBUG oslo_vmware.api [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926007, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.264449] env[69994]: INFO nova.compute.manager [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Took 34.39 seconds to build instance. [ 1035.340379] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57859be3-906c-4fdb-a5a4-081054c1c445 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.349472] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b200ca4-d182-4b5b-92e2-db43ed76b282 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.363016] env[69994]: DEBUG oslo_vmware.api [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2926004, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.296351} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.386172] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1035.386412] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1035.386613] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1035.386790] env[69994]: INFO nova.compute.manager [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1035.387068] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1035.387495] env[69994]: DEBUG nova.compute.manager [-] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1035.387598] env[69994]: DEBUG nova.network.neutron [-] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1035.389789] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dafcedf3-ef84-42d1-9c49-bde4a42c0e9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.398507] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d7a50e-c1cf-4ac0-8750-af987c46106d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.415356] env[69994]: DEBUG nova.compute.provider_tree [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.432578] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.432816] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.448299] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926005, 'name': ReconfigVM_Task, 'duration_secs': 0.41593} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.448884] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 686feb53-00e2-43d9-b316-09c089df0891/686feb53-00e2-43d9-b316-09c089df0891.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1035.449808] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6082e76f-5cdc-409f-aba5-8f67ca930584 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.457526] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1035.457526] env[69994]: value = "task-2926008" [ 1035.457526] env[69994]: _type = "Task" [ 1035.457526] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.468397] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926008, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.535834] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1035.536086] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1035.536254] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1035.536434] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1035.536588] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1035.536777] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1035.536993] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1035.537175] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1035.537350] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1035.537501] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1035.537667] env[69994]: DEBUG nova.virt.hardware [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1035.545364] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Reconfiguring VM instance instance-00000054 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1035.545792] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fde54a41-11e1-4023-827f-9230bca4d530 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.567299] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1035.567299] env[69994]: value = "task-2926009" [ 1035.567299] env[69994]: _type = "Task" [ 1035.567299] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.575272] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926009, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.581867] env[69994]: DEBUG oslo_concurrency.lockutils [None req-247b28a9-3196-461f-a025-ce829f173473 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.875s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.682157] env[69994]: DEBUG nova.compute.manager [req-fccd38ee-755c-4cde-ae14-6d8f1ee8a73c req-bb6fdecb-26fa-45c7-bc76-51e1d7458ae6 service nova] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Received event network-vif-deleted-93ff6adf-86c8-4337-bed8-32c58f7afb15 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1035.682373] env[69994]: INFO nova.compute.manager [req-fccd38ee-755c-4cde-ae14-6d8f1ee8a73c req-bb6fdecb-26fa-45c7-bc76-51e1d7458ae6 service nova] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Neutron deleted interface 93ff6adf-86c8-4337-bed8-32c58f7afb15; detaching it from the instance and deleting it from the info cache [ 1035.682690] env[69994]: DEBUG nova.network.neutron [req-fccd38ee-755c-4cde-ae14-6d8f1ee8a73c req-bb6fdecb-26fa-45c7-bc76-51e1d7458ae6 service nova] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.704499] env[69994]: DEBUG oslo_vmware.api [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926007, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225912} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.705017] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1035.706032] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1035.706032] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1035.706032] env[69994]: INFO nova.compute.manager [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1035.706032] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1035.706366] env[69994]: DEBUG nova.compute.manager [-] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1035.706366] env[69994]: DEBUG nova.network.neutron [-] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1035.737749] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "63e1c67b-6a79-4c09-a835-4ff11e15e981" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.738035] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "63e1c67b-6a79-4c09-a835-4ff11e15e981" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.767536] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02c54ac5-8061-40c3-8013-e07d4f071304 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Lock "93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.896s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.922471] env[69994]: DEBUG nova.scheduler.client.report [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1035.936879] env[69994]: DEBUG nova.compute.manager [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1035.939905] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.940676] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.940902] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.941108] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.941312] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.947027] env[69994]: INFO nova.compute.manager [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Terminating instance [ 1035.967781] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926008, 'name': Rename_Task, 'duration_secs': 0.194288} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.969987] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1035.969987] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1bbd5572-2ef3-4287-b8d6-8a10a5455165 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.976753] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1035.976753] env[69994]: value = "task-2926010" [ 1035.976753] env[69994]: _type = "Task" [ 1035.976753] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.985756] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926010, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.080192] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926009, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.156554] env[69994]: DEBUG nova.network.neutron [-] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.185538] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94ea1e6a-3309-4a63-8de7-e054c90d05cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.195558] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9109e932-013a-49a9-85d0-087bb2c649c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.234032] env[69994]: DEBUG nova.compute.manager [req-fccd38ee-755c-4cde-ae14-6d8f1ee8a73c req-bb6fdecb-26fa-45c7-bc76-51e1d7458ae6 service nova] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Detach interface failed, port_id=93ff6adf-86c8-4337-bed8-32c58f7afb15, reason: Instance c98308b3-2431-4f17-9022-bcd9f1e83a35 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1036.240701] env[69994]: DEBUG nova.compute.manager [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1036.428077] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.428693] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1036.433437] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.505s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.433899] env[69994]: INFO nova.compute.claims [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1036.449388] env[69994]: DEBUG nova.compute.manager [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1036.449388] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.452704] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33cfec5-63bb-4923-b49e-fbc44d96a7f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.464024] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.464024] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b7204a8-07e5-4e8d-84c7-b3881d753aac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.468093] env[69994]: DEBUG oslo_vmware.api [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1036.468093] env[69994]: value = "task-2926011" [ 1036.468093] env[69994]: _type = "Task" [ 1036.468093] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.477460] env[69994]: DEBUG oslo_vmware.api [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926011, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.478542] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.489266] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926010, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.579993] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926009, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.590718] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Acquiring lock "93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.591421] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Lock "93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.591693] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Acquiring lock "93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.591939] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Lock "93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.592175] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Lock "93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.594630] env[69994]: INFO nova.compute.manager [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Terminating instance [ 1036.652026] env[69994]: DEBUG nova.network.neutron [-] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.662316] env[69994]: INFO nova.compute.manager [-] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Took 1.27 seconds to deallocate network for instance. 
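The ReconfigVM_Task / PowerOnVM_Task / DeleteDatastoreFile_Task entries above all follow the same wait-and-poll pattern: the vSphere call returns a task handle, and the caller re-reads its state until it reports success, emitting the "progress is N%." and "completed successfully." lines seen here. The following is only a minimal sketch of that loop, assuming a caller-supplied get_task_info() that returns a dict with 'state' and 'progress' keys (an illustrative stand-in for the vCenter TaskInfo object, not the oslo.vmware implementation):

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Illustrative only: get_task_info is a hypothetical callable standing
        # in for a TaskInfo lookup against vCenter.
        while True:
            info = get_task_info()
            if info['state'] in ('queued', 'running'):
                # Corresponds to the "... progress is N%." DEBUG lines above.
                time.sleep(poll_interval)
                continue
            if info['state'] == 'success':
                # Corresponds to the "... completed successfully." lines above.
                return info
            raise RuntimeError('task failed: %s' % info.get('error'))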
[ 1036.768428] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.783435] env[69994]: DEBUG nova.objects.instance [None req-cde0f4cc-0c0d-439f-8b33-11585f8b7734 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Lazy-loading 'flavor' on Instance uuid e1c00159-d198-4858-b5a3-aa05152b1fda {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1036.941451] env[69994]: DEBUG nova.compute.utils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1036.942951] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1036.943136] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1036.978471] env[69994]: DEBUG oslo_vmware.api [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926011, 'name': PowerOffVM_Task, 'duration_secs': 0.259364} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.978770] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1036.978946] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.979300] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21ff2923-5ba6-46cb-8e3e-b6624c6f6721 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.990897] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926010, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.001596] env[69994]: DEBUG nova.policy [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ddbe228af4ae4cdc8483c4ee5a4ee841', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c4d971390cf41fa93029cb4418c8ef8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1037.058394] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1037.058637] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1037.058811] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleting the datastore file [datastore2] d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.059115] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed044728-b0fb-4b55-9c94-bfadcc1c247c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.065204] env[69994]: DEBUG oslo_vmware.api [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1037.065204] env[69994]: value = "task-2926013" [ 1037.065204] env[69994]: _type = "Task" [ 1037.065204] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.080640] env[69994]: DEBUG oslo_vmware.api [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926013, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.080898] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926009, 'name': ReconfigVM_Task, 'duration_secs': 1.21595} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.081156] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Reconfigured VM instance instance-00000054 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1037.081969] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5939a06-779a-4d22-b790-a837284b0c96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.099232] env[69994]: DEBUG nova.compute.manager [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1037.099479] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1037.107534] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 5b9648a7-f26f-4151-be5c-59991035a529/5b9648a7-f26f-4151-be5c-59991035a529.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.108349] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e02a1b1-c3d2-455c-842c-661c15e262ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.111355] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b398870-676d-4a70-81ba-d67c7da1b595 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.130133] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1037.131449] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58502e37-aa7e-44cb-9ea8-730df8293336 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.133105] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1037.133105] env[69994]: value = "task-2926014" [ 1037.133105] env[69994]: _type = "Task" [ 1037.133105] 
env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.137343] env[69994]: DEBUG oslo_vmware.api [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Waiting for the task: (returnval){ [ 1037.137343] env[69994]: value = "task-2926015" [ 1037.137343] env[69994]: _type = "Task" [ 1037.137343] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.144161] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926014, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.149675] env[69994]: DEBUG oslo_vmware.api [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2926015, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.154314] env[69994]: INFO nova.compute.manager [-] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Took 1.45 seconds to deallocate network for instance. [ 1037.166446] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.291620] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cde0f4cc-0c0d-439f-8b33-11585f8b7734 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquiring lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.291835] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cde0f4cc-0c0d-439f-8b33-11585f8b7734 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquired lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.443073] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Successfully created port: bc1a6a8e-cea0-48bf-96b9-150002406dfc {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1037.448394] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1037.491839] env[69994]: DEBUG oslo_vmware.api [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926010, 'name': PowerOnVM_Task, 'duration_secs': 1.426819} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.492147] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1037.492763] env[69994]: DEBUG nova.compute.manager [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1037.493431] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0372dc3-97f1-40f4-9b1f-a7482a526b9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.580057] env[69994]: DEBUG oslo_vmware.api [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926013, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.269012} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.581201] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.581201] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.581201] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.581201] env[69994]: INFO nova.compute.manager [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1037.581393] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1037.581571] env[69994]: DEBUG nova.compute.manager [-] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1037.581624] env[69994]: DEBUG nova.network.neutron [-] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1037.645376] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926014, 'name': ReconfigVM_Task, 'duration_secs': 0.378851} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.648489] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 5b9648a7-f26f-4151-be5c-59991035a529/5b9648a7-f26f-4151-be5c-59991035a529.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.648790] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating instance '5b9648a7-f26f-4151-be5c-59991035a529' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1037.655411] env[69994]: DEBUG oslo_vmware.api [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2926015, 'name': PowerOffVM_Task, 'duration_secs': 0.207248} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.656061] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1037.656135] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1037.656390] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd3f6d9e-16b7-46f3-85b1-2ff33d3f78c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.662371] env[69994]: DEBUG oslo_concurrency.lockutils [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.713530] env[69994]: DEBUG nova.compute.manager [req-47d638e5-82de-4abd-be2e-e27a5597ea2d req-7fc884f2-0c7c-4e52-a4af-1039ed329332 service nova] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Received event network-vif-deleted-381f1b5f-fbf6-499e-afb0-d63ec11e7e21 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1037.723206] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1037.723759] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1037.723759] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Deleting the datastore file [datastore2] 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.727532] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a758ab45-d884-41f6-9520-b2f241e6ab59 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.734151] env[69994]: DEBUG oslo_vmware.api [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Waiting for the task: (returnval){ [ 1037.734151] env[69994]: value = "task-2926017" [ 1037.734151] env[69994]: 
_type = "Task" [ 1037.734151] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.743774] env[69994]: DEBUG oslo_vmware.api [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2926017, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.835557] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b132ab5-281a-4985-b855-f7603ecc6435 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.843981] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235f1de9-c916-45b4-95a0-c50ac6db17a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.879884] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d90461-093e-4abe-83e6-4c299b828e6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.889189] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b15098-dc0d-44e9-a588-d659836a6e94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.904320] env[69994]: DEBUG nova.compute.provider_tree [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1037.912330] env[69994]: DEBUG nova.compute.manager [req-f01e77ff-8bb5-4b8e-a884-8a78556509c4 req-bb32a6e7-5388-41df-bccf-8c427b13a5da service nova] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Received event network-vif-deleted-984ab9de-140f-4878-b423-5945bbb0353a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1037.912461] env[69994]: INFO nova.compute.manager [req-f01e77ff-8bb5-4b8e-a884-8a78556509c4 req-bb32a6e7-5388-41df-bccf-8c427b13a5da service nova] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Neutron deleted interface 984ab9de-140f-4878-b423-5945bbb0353a; detaching it from the instance and deleting it from the info cache [ 1037.912804] env[69994]: DEBUG nova.network.neutron [req-f01e77ff-8bb5-4b8e-a884-8a78556509c4 req-bb32a6e7-5388-41df-bccf-8c427b13a5da service nova] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.936052] env[69994]: DEBUG nova.network.neutron [None req-cde0f4cc-0c0d-439f-8b33-11585f8b7734 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1038.016826] env[69994]: DEBUG oslo_concurrency.lockutils [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 
tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.163018] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db52b4c4-db14-4243-afe7-318b315fea75 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.182666] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ff3ff3-f9cf-47d4-863e-4ff4a0da163f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.201380] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating instance '5b9648a7-f26f-4151-be5c-59991035a529' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1038.244574] env[69994]: DEBUG oslo_vmware.api [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Task: {'id': task-2926017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.438851} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.246482] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1038.246674] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1038.246853] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1038.247069] env[69994]: INFO nova.compute.manager [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1038.247336] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1038.250478] env[69994]: DEBUG nova.compute.manager [-] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1038.250616] env[69994]: DEBUG nova.network.neutron [-] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1038.394983] env[69994]: DEBUG nova.network.neutron [-] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.408462] env[69994]: DEBUG nova.scheduler.client.report [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1038.414850] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99afb169-61f0-4be1-a3c2-67d089148813 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.428050] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd05483-ab8f-40c2-b115-653fe54a3190 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.475841] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1038.479381] env[69994]: DEBUG nova.compute.manager [req-f01e77ff-8bb5-4b8e-a884-8a78556509c4 req-bb32a6e7-5388-41df-bccf-8c427b13a5da service nova] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Detach interface failed, port_id=984ab9de-140f-4878-b423-5945bbb0353a, reason: Instance d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1038.512336] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1038.512760] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1038.512980] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1038.513246] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1038.513458] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1038.513693] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1038.513957] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1038.514172] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1038.514396] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1038.514716] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1038.514782] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1038.516141] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36728d21-f9e7-456c-8129-f5fece76ecbb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.527738] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14ed975-23d0-4355-8e37-5fa1be780926 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.738104] env[69994]: DEBUG nova.network.neutron [None req-cde0f4cc-0c0d-439f-8b33-11585f8b7734 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Updating instance_info_cache with network_info: [{"id": "3d8fb179-d40f-4e18-8089-07f61c108080", "address": "fa:16:3e:a9:49:3e", "network": {"id": "132b6a5f-2a69-4b91-a418-959f72df76fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1585728291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef2aa3bc994a479e838e89fa7058ad64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d8fb179-d4", "ovs_interfaceid": "3d8fb179-d40f-4e18-8089-07f61c108080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.740375] env[69994]: DEBUG nova.network.neutron [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 
tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Port ad28c14f-638f-4073-b494-cb6a2a579dab binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1038.753876] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquiring lock "686feb53-00e2-43d9-b316-09c089df0891" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.754171] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Lock "686feb53-00e2-43d9-b316-09c089df0891" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.754502] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquiring lock "686feb53-00e2-43d9-b316-09c089df0891-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.754768] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Lock "686feb53-00e2-43d9-b316-09c089df0891-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.754943] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Lock "686feb53-00e2-43d9-b316-09c089df0891-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.757159] env[69994]: INFO nova.compute.manager [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Terminating instance [ 1038.900912] env[69994]: INFO nova.compute.manager [-] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Took 1.32 seconds to deallocate network for instance. 
[ 1038.915645] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.916253] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1038.919249] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.960s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.920848] env[69994]: INFO nova.compute.claims [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1039.003296] env[69994]: DEBUG nova.network.neutron [-] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.068119] env[69994]: DEBUG nova.objects.instance [None req-40113d85-46e5-463f-a7ff-70eec3c330ab tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Lazy-loading 'flavor' on Instance uuid e1c00159-d198-4858-b5a3-aa05152b1fda {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.211774] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Successfully updated port: bc1a6a8e-cea0-48bf-96b9-150002406dfc {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1039.243108] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cde0f4cc-0c0d-439f-8b33-11585f8b7734 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Releasing lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.244057] env[69994]: DEBUG nova.compute.manager [None req-cde0f4cc-0c0d-439f-8b33-11585f8b7734 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Inject network info {{(pid=69994) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1039.244920] env[69994]: DEBUG nova.compute.manager [None req-cde0f4cc-0c0d-439f-8b33-11585f8b7734 tempest-AttachInterfacesUnderV243Test-349869599 
tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] network_info to inject: |[{"id": "3d8fb179-d40f-4e18-8089-07f61c108080", "address": "fa:16:3e:a9:49:3e", "network": {"id": "132b6a5f-2a69-4b91-a418-959f72df76fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1585728291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef2aa3bc994a479e838e89fa7058ad64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d8fb179-d4", "ovs_interfaceid": "3d8fb179-d40f-4e18-8089-07f61c108080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1039.249717] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cde0f4cc-0c0d-439f-8b33-11585f8b7734 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Reconfiguring VM instance to set the machine id {{(pid=69994) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1039.254324] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc84d46f-72f5-4ca9-b74f-6eb51653827d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.267199] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquiring lock "refresh_cache-686feb53-00e2-43d9-b316-09c089df0891" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.267428] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquired lock "refresh_cache-686feb53-00e2-43d9-b316-09c089df0891" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.267619] env[69994]: DEBUG nova.network.neutron [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.274782] env[69994]: DEBUG oslo_vmware.api [None req-cde0f4cc-0c0d-439f-8b33-11585f8b7734 tempest-AttachInterfacesUnderV243Test-349869599 
tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for the task: (returnval){ [ 1039.274782] env[69994]: value = "task-2926018" [ 1039.274782] env[69994]: _type = "Task" [ 1039.274782] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.286463] env[69994]: DEBUG oslo_vmware.api [None req-cde0f4cc-0c0d-439f-8b33-11585f8b7734 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2926018, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.408581] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.426291] env[69994]: DEBUG nova.compute.utils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1039.430738] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1039.430738] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1039.487268] env[69994]: DEBUG nova.policy [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ddbe228af4ae4cdc8483c4ee5a4ee841', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c4d971390cf41fa93029cb4418c8ef8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1039.506819] env[69994]: INFO nova.compute.manager [-] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Took 1.26 seconds to deallocate network for instance. 
[ 1039.577542] env[69994]: DEBUG oslo_concurrency.lockutils [None req-40113d85-46e5-463f-a7ff-70eec3c330ab tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquiring lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.577542] env[69994]: DEBUG oslo_concurrency.lockutils [None req-40113d85-46e5-463f-a7ff-70eec3c330ab tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquired lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.714876] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "refresh_cache-4d415c4d-54b2-4324-8e98-9dc476960348" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.715288] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquired lock "refresh_cache-4d415c4d-54b2-4324-8e98-9dc476960348" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.715520] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.755902] env[69994]: DEBUG nova.compute.manager [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Received event network-changed-3d8fb179-d40f-4e18-8089-07f61c108080 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1039.757043] env[69994]: DEBUG nova.compute.manager [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Refreshing instance network info cache due to event network-changed-3d8fb179-d40f-4e18-8089-07f61c108080. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1039.757363] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] Acquiring lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.786281] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "5b9648a7-f26f-4151-be5c-59991035a529-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.786556] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "5b9648a7-f26f-4151-be5c-59991035a529-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.786762] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "5b9648a7-f26f-4151-be5c-59991035a529-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.799267] env[69994]: DEBUG oslo_vmware.api [None req-cde0f4cc-0c0d-439f-8b33-11585f8b7734 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2926018, 'name': ReconfigVM_Task, 'duration_secs': 0.182449} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.799629] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cde0f4cc-0c0d-439f-8b33-11585f8b7734 tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Reconfigured VM instance to set the machine id {{(pid=69994) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1039.820019] env[69994]: DEBUG nova.network.neutron [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1039.875485] env[69994]: DEBUG nova.network.neutron [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.931837] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1040.018768] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.051283] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Successfully created port: dea09a3f-a839-4d7e-aa69-37bee8855d79 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1040.191276] env[69994]: DEBUG nova.network.neutron [None req-40113d85-46e5-463f-a7ff-70eec3c330ab tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1040.268272] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1040.283237] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee57e649-fb13-4cc9-b703-0cbcbaab536a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.293841] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4864e9-9f67-44b6-bc5e-e17def85803a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.331948] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b794b7b-6be1-4a75-b9c2-e579003748c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.340539] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d925a94a-e41c-4680-a095-915711f9c33a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.355216] env[69994]: DEBUG nova.compute.provider_tree [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.379061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Releasing lock "refresh_cache-686feb53-00e2-43d9-b316-09c089df0891" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.379613] env[69994]: DEBUG nova.compute.manager [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1040.379833] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1040.380894] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee724c3d-f145-424d-80e3-6d70bb592de2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.389668] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1040.390014] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11af64c9-9e75-43fc-b9f5-a9aed8132282 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.396623] env[69994]: DEBUG oslo_vmware.api [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1040.396623] env[69994]: value = "task-2926019" [ 1040.396623] env[69994]: _type = "Task" [ 1040.396623] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.410583] env[69994]: DEBUG oslo_vmware.api [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926019, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.505921] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Updating instance_info_cache with network_info: [{"id": "bc1a6a8e-cea0-48bf-96b9-150002406dfc", "address": "fa:16:3e:30:3b:e5", "network": {"id": "25863de4-2e36-47ad-9bc4-ad079d84eb42", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1499705426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c4d971390cf41fa93029cb4418c8ef8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc1a6a8e-ce", "ovs_interfaceid": "bc1a6a8e-cea0-48bf-96b9-150002406dfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.819620] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.819883] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.820121] env[69994]: DEBUG nova.network.neutron [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1040.862474] env[69994]: DEBUG nova.scheduler.client.report [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1040.907311] env[69994]: DEBUG oslo_vmware.api [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926019, 'name': PowerOffVM_Task, 'duration_secs': 0.123782} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.907588] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1040.907757] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1040.908039] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ad0e5fe-d4d7-4ce5-b763-9780de124a47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.935856] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.936347] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.936546] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Deleting the datastore file [datastore2] 686feb53-00e2-43d9-b316-09c089df0891 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.936817] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68d7a134-6241-4bc7-bb62-2242652a3310 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.943022] env[69994]: DEBUG oslo_vmware.api [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for the task: (returnval){ [ 1040.943022] env[69994]: value = "task-2926021" [ 1040.943022] env[69994]: _type = "Task" [ 1040.943022] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.948921] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1040.956491] env[69994]: DEBUG oslo_vmware.api [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926021, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.973240] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1040.973550] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1040.974132] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1040.974489] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1040.974713] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1040.974899] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1040.975146] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1040.975330] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1040.975615] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1040.975807] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1040.975986] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1040.976849] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673ba309-21c5-45ad-a01d-523d9f2056a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.985803] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bd755e-9246-4058-be48-a087c137b576 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.008115] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525768f1-2d5f-7655-2235-ad286e326d98/disk-0.vmdk. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1041.008991] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047bb6c3-df62-48c0-9001-be64daacc412 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.011610] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Releasing lock "refresh_cache-4d415c4d-54b2-4324-8e98-9dc476960348" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.011926] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Instance network_info: |[{"id": "bc1a6a8e-cea0-48bf-96b9-150002406dfc", "address": "fa:16:3e:30:3b:e5", "network": {"id": "25863de4-2e36-47ad-9bc4-ad079d84eb42", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1499705426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c4d971390cf41fa93029cb4418c8ef8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc1a6a8e-ce", "ovs_interfaceid": "bc1a6a8e-cea0-48bf-96b9-150002406dfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1041.012349] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:3b:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac7039c0-3374-4c08-87fc-af2449b48b02', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc1a6a8e-cea0-48bf-96b9-150002406dfc', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1041.019827] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Creating folder: Project (9c4d971390cf41fa93029cb4418c8ef8). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1041.020789] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61d7e44e-aa26-4166-b53e-4b50c35bab56 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.023954] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525768f1-2d5f-7655-2235-ad286e326d98/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1041.024175] env[69994]: ERROR oslo_vmware.rw_handles [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525768f1-2d5f-7655-2235-ad286e326d98/disk-0.vmdk due to incomplete transfer. [ 1041.024735] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7b31cb97-0da4-4049-a947-685f39cacf1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.032432] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Created folder: Project (9c4d971390cf41fa93029cb4418c8ef8) in parent group-v587342. [ 1041.032657] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Creating folder: Instances. Parent ref: group-v587588. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1041.034340] env[69994]: DEBUG nova.network.neutron [None req-40113d85-46e5-463f-a7ff-70eec3c330ab tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Updating instance_info_cache with network_info: [{"id": "3d8fb179-d40f-4e18-8089-07f61c108080", "address": "fa:16:3e:a9:49:3e", "network": {"id": "132b6a5f-2a69-4b91-a418-959f72df76fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1585728291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef2aa3bc994a479e838e89fa7058ad64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d8fb179-d4", "ovs_interfaceid": "3d8fb179-d40f-4e18-8089-07f61c108080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.035565] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-93eb4868-4406-44f4-9376-868a0ac90ac5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.037014] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525768f1-2d5f-7655-2235-ad286e326d98/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1041.037205] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Uploaded image c18ad5ff-2d6b-46c7-9cf7-a9e4b3b5d13f to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1041.039895] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1041.040473] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f623e0b2-96c7-429f-bcae-eb9301bcc346 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.045995] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1041.045995] env[69994]: value = "task-2926023" [ 1041.045995] env[69994]: _type = "Task" [ 1041.045995] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.050898] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Created folder: Instances in parent group-v587588. [ 1041.051159] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1041.053825] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1041.054281] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926023, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.054478] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea805ed3-38c9-4d01-979e-954ad233143c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.072608] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1041.072608] env[69994]: value = "task-2926025" [ 1041.072608] env[69994]: _type = "Task" [ 1041.072608] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.079563] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926025, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.367749] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.368407] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1041.370984] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.801s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.371220] env[69994]: DEBUG nova.objects.instance [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Lazy-loading 'resources' on Instance uuid 1ff25686-e13e-4003-909b-18bf919aa20c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.452985] env[69994]: DEBUG oslo_vmware.api [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Task: {'id': task-2926021, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088557} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.453304] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.453498] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1041.453678] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1041.453853] env[69994]: INFO nova.compute.manager [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1041.454110] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1041.454314] env[69994]: DEBUG nova.compute.manager [-] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1041.454405] env[69994]: DEBUG nova.network.neutron [-] [instance: 686feb53-00e2-43d9-b316-09c089df0891] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1041.469050] env[69994]: DEBUG nova.network.neutron [-] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1041.542638] env[69994]: DEBUG oslo_concurrency.lockutils [None req-40113d85-46e5-463f-a7ff-70eec3c330ab tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Releasing lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.542638] env[69994]: DEBUG nova.compute.manager [None req-40113d85-46e5-463f-a7ff-70eec3c330ab tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Inject network info {{(pid=69994) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1041.542638] env[69994]: DEBUG nova.compute.manager [None req-40113d85-46e5-463f-a7ff-70eec3c330ab tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] network_info to inject: |[{"id": "3d8fb179-d40f-4e18-8089-07f61c108080", "address": "fa:16:3e:a9:49:3e", "network": {"id": "132b6a5f-2a69-4b91-a418-959f72df76fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1585728291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef2aa3bc994a479e838e89fa7058ad64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d8fb179-d4", "ovs_interfaceid": "3d8fb179-d40f-4e18-8089-07f61c108080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1041.549710] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-40113d85-46e5-463f-a7ff-70eec3c330ab tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Reconfiguring VM instance to set the machine id {{(pid=69994) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1041.551210] env[69994]: DEBUG nova.network.neutron [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating instance_info_cache with network_info: [{"id": "ad28c14f-638f-4073-b494-cb6a2a579dab", "address": "fa:16:3e:60:ef:9a", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad28c14f-63", "ovs_interfaceid": "ad28c14f-638f-4073-b494-cb6a2a579dab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.552282] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] Acquired lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.552510] env[69994]: DEBUG nova.network.neutron [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Refreshing network info cache for port 3d8fb179-d40f-4e18-8089-07f61c108080 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1041.553848] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a6ee76c-96d7-41c1-a3f8-3f916b9dec79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.574554] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.602079] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926023, 'name': Destroy_Task, 'duration_secs': 0.351449} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.606068] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Destroyed the VM [ 1041.606323] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1041.606966] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926025, 'name': CreateVM_Task, 'duration_secs': 0.365562} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.606966] env[69994]: DEBUG oslo_vmware.api [None req-40113d85-46e5-463f-a7ff-70eec3c330ab tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for the task: (returnval){ [ 1041.606966] env[69994]: value = "task-2926026" [ 1041.606966] env[69994]: _type = "Task" [ 1041.606966] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.607970] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5432a540-6d6a-4532-9a02-04c015accaee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.610099] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1041.611326] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.611512] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.611834] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1041.615140] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d50243b9-56cf-4ea2-873a-44c4de8189f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.619393] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1041.619393] env[69994]: value = "task-2926027" [ 1041.619393] env[69994]: _type = "Task" [ 1041.619393] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.628235] env[69994]: DEBUG oslo_vmware.api [None req-40113d85-46e5-463f-a7ff-70eec3c330ab tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2926026, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.628579] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1041.628579] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529641ec-cb19-545f-3350-96f4dcef04fd" [ 1041.628579] env[69994]: _type = "Task" [ 1041.628579] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.635033] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926027, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.641664] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529641ec-cb19-545f-3350-96f4dcef04fd, 'name': SearchDatastore_Task, 'duration_secs': 0.011762} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.641949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.642196] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1041.642429] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.642574] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.642753] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1041.643016] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90cbfab9-847d-497f-96aa-d18291eca493 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.650517] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1041.650741] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1041.651488] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-880ae95f-8b37-4306-ae71-d1046e4e7729 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.656606] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1041.656606] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520a2497-8629-3208-fd7b-18fb342ff1eb" [ 1041.656606] env[69994]: _type = "Task" [ 1041.656606] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.666134] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520a2497-8629-3208-fd7b-18fb342ff1eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.745916] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquiring lock "e1c00159-d198-4858-b5a3-aa05152b1fda" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.746209] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Lock "e1c00159-d198-4858-b5a3-aa05152b1fda" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.746420] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquiring lock "e1c00159-d198-4858-b5a3-aa05152b1fda-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.746602] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Lock "e1c00159-d198-4858-b5a3-aa05152b1fda-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.746773] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Lock "e1c00159-d198-4858-b5a3-aa05152b1fda-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.751463] env[69994]: INFO nova.compute.manager [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Terminating instance [ 1041.795057] env[69994]: DEBUG nova.compute.manager [req-8d75cf41-5fe8-4f79-b9a2-dd65b59be1d0 req-49f2525c-4282-41f1-8379-1d06a936c61e service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Received event network-changed-3d8fb179-d40f-4e18-8089-07f61c108080 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1041.795275] env[69994]: DEBUG nova.compute.manager [req-8d75cf41-5fe8-4f79-b9a2-dd65b59be1d0 req-49f2525c-4282-41f1-8379-1d06a936c61e service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Refreshing instance network info cache due to event network-changed-3d8fb179-d40f-4e18-8089-07f61c108080. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1041.795472] env[69994]: DEBUG oslo_concurrency.lockutils [req-8d75cf41-5fe8-4f79-b9a2-dd65b59be1d0 req-49f2525c-4282-41f1-8379-1d06a936c61e service nova] Acquiring lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.874492] env[69994]: DEBUG nova.compute.utils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1041.878311] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1041.878491] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1041.929057] env[69994]: DEBUG nova.policy [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ddbe228af4ae4cdc8483c4ee5a4ee841', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c4d971390cf41fa93029cb4418c8ef8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1041.971951] env[69994]: DEBUG nova.network.neutron [-] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.039090] env[69994]: DEBUG nova.network.neutron [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Updated VIF entry in instance network info cache for port 3d8fb179-d40f-4e18-8089-07f61c108080. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1042.039496] env[69994]: DEBUG nova.network.neutron [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Updating instance_info_cache with network_info: [{"id": "3d8fb179-d40f-4e18-8089-07f61c108080", "address": "fa:16:3e:a9:49:3e", "network": {"id": "132b6a5f-2a69-4b91-a418-959f72df76fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1585728291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef2aa3bc994a479e838e89fa7058ad64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d8fb179-d4", "ovs_interfaceid": "3d8fb179-d40f-4e18-8089-07f61c108080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.044308] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Successfully updated port: dea09a3f-a839-4d7e-aa69-37bee8855d79 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1042.107142] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e45473-7815-4374-95a7-23d9110724b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.122488] env[69994]: DEBUG oslo_vmware.api [None req-40113d85-46e5-463f-a7ff-70eec3c330ab tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2926026, 'name': ReconfigVM_Task, 'duration_secs': 0.147273} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.140244] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-40113d85-46e5-463f-a7ff-70eec3c330ab tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Reconfigured VM instance to set the machine id {{(pid=69994) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1042.145858] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2693b522-d24c-47a6-a0de-b0ee1075ffae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.158025] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926027, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.160653] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating instance '5b9648a7-f26f-4151-be5c-59991035a529' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1042.175801] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520a2497-8629-3208-fd7b-18fb342ff1eb, 'name': SearchDatastore_Task, 'duration_secs': 0.008773} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.176665] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e4be881-80c0-4f54-a848-c44bdba1e7a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.185071] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1042.185071] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526dd572-ce80-7933-e14d-d5e948d260d5" [ 1042.185071] env[69994]: _type = "Task" [ 1042.185071] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.193573] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526dd572-ce80-7933-e14d-d5e948d260d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.203336] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Successfully created port: 200e0161-2f5b-4939-90ae-3eb3457ffac7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1042.232607] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d4934b-c02d-4186-a13a-58810b7fe9ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.240428] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e7e89f-3137-4f8b-9631-eb64ce1ff814 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.269365] env[69994]: DEBUG nova.compute.manager [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1042.269593] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1042.270623] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea648c0-48f9-491d-b431-eeb01643f082 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.273709] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310b72bc-bad0-45d0-840c-fd517dc3dd57 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.283501] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8468748-3027-49d3-ba00-c4f533d3a6f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.286972] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.287221] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89d9753d-6930-4b9e-87bb-a62f25e07567 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.299325] env[69994]: DEBUG nova.compute.provider_tree [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Inventory has not changed in ProviderTree for provider: 
2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.301683] env[69994]: DEBUG oslo_vmware.api [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for the task: (returnval){ [ 1042.301683] env[69994]: value = "task-2926028" [ 1042.301683] env[69994]: _type = "Task" [ 1042.301683] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.309520] env[69994]: DEBUG oslo_vmware.api [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2926028, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.383945] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1042.475109] env[69994]: INFO nova.compute.manager [-] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Took 1.02 seconds to deallocate network for instance. [ 1042.544209] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] Releasing lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.548113] env[69994]: DEBUG nova.compute.manager [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Received event network-vif-deleted-a44e6223-be47-4cd2-87c7-44a1fb78bc1c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1042.548113] env[69994]: DEBUG nova.compute.manager [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Received event network-vif-plugged-bc1a6a8e-cea0-48bf-96b9-150002406dfc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1042.548113] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] Acquiring lock "4d415c4d-54b2-4324-8e98-9dc476960348-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.548113] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] Lock "4d415c4d-54b2-4324-8e98-9dc476960348-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.548113] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] Lock 
"4d415c4d-54b2-4324-8e98-9dc476960348-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.548113] env[69994]: DEBUG nova.compute.manager [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] No waiting events found dispatching network-vif-plugged-bc1a6a8e-cea0-48bf-96b9-150002406dfc {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1042.548113] env[69994]: WARNING nova.compute.manager [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Received unexpected event network-vif-plugged-bc1a6a8e-cea0-48bf-96b9-150002406dfc for instance with vm_state building and task_state spawning. [ 1042.548113] env[69994]: DEBUG nova.compute.manager [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Received event network-changed-bc1a6a8e-cea0-48bf-96b9-150002406dfc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1042.548113] env[69994]: DEBUG nova.compute.manager [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Refreshing instance network info cache due to event network-changed-bc1a6a8e-cea0-48bf-96b9-150002406dfc. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1042.548113] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] Acquiring lock "refresh_cache-4d415c4d-54b2-4324-8e98-9dc476960348" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.548113] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] Acquired lock "refresh_cache-4d415c4d-54b2-4324-8e98-9dc476960348" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.548113] env[69994]: DEBUG nova.network.neutron [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Refreshing network info cache for port bc1a6a8e-cea0-48bf-96b9-150002406dfc {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1042.548113] env[69994]: DEBUG oslo_concurrency.lockutils [req-8d75cf41-5fe8-4f79-b9a2-dd65b59be1d0 req-49f2525c-4282-41f1-8379-1d06a936c61e service nova] Acquired lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.548113] env[69994]: DEBUG nova.network.neutron [req-8d75cf41-5fe8-4f79-b9a2-dd65b59be1d0 req-49f2525c-4282-41f1-8379-1d06a936c61e service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Refreshing network info cache for port 3d8fb179-d40f-4e18-8089-07f61c108080 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1042.549835] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 
tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "refresh_cache-6c81eb8b-78d7-469d-8076-13d8a8f61fec" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.549967] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquired lock "refresh_cache-6c81eb8b-78d7-469d-8076-13d8a8f61fec" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.550116] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1042.632521] env[69994]: DEBUG oslo_vmware.api [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926027, 'name': RemoveSnapshot_Task, 'duration_secs': 0.577999} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.633430] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1042.633430] env[69994]: INFO nova.compute.manager [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Took 14.15 seconds to snapshot the instance on the hypervisor. [ 1042.672499] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1042.672793] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aaa190e2-b10f-4288-a4fd-da3ad2bb95f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.680876] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1042.680876] env[69994]: value = "task-2926029" [ 1042.680876] env[69994]: _type = "Task" [ 1042.680876] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.691017] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926029, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.700854] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526dd572-ce80-7933-e14d-d5e948d260d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009313} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.701114] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.701430] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 4d415c4d-54b2-4324-8e98-9dc476960348/4d415c4d-54b2-4324-8e98-9dc476960348.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1042.701704] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2920d182-16e7-4e32-af83-e3390e284bb5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.709259] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1042.709259] env[69994]: value = "task-2926030" [ 1042.709259] env[69994]: _type = "Task" [ 1042.709259] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.719709] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926030, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.803910] env[69994]: DEBUG nova.scheduler.client.report [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1042.816902] env[69994]: DEBUG oslo_vmware.api [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2926028, 'name': PowerOffVM_Task, 'duration_secs': 0.196443} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.817145] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1042.817326] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1042.817659] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-68bcf014-5401-46e3-9173-22ec80a954d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.881277] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1042.881533] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1042.881680] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Deleting the datastore file [datastore1] e1c00159-d198-4858-b5a3-aa05152b1fda {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1042.881963] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-1efc0f3d-cede-4c1f-82dc-b2a227f1d7db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.891882] env[69994]: DEBUG oslo_vmware.api [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for the task: (returnval){ [ 1042.891882] env[69994]: value = "task-2926032" [ 1042.891882] env[69994]: _type = "Task" [ 1042.891882] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.900104] env[69994]: DEBUG oslo_vmware.api [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2926032, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.983568] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.137411] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1043.199215] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926029, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.216292] env[69994]: DEBUG nova.compute.manager [None req-da5fc095-4a5a-4252-a57b-68cad2c61a5f tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Found 1 images (rotation: 2) {{(pid=69994) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1043.225382] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926030, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.312438] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.941s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.314801] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.836s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.316596] env[69994]: INFO nova.compute.claims [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1043.351862] env[69994]: INFO nova.scheduler.client.report [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Deleted allocations for instance 1ff25686-e13e-4003-909b-18bf919aa20c [ 1043.394353] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1043.410261] env[69994]: DEBUG oslo_vmware.api [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Task: {'id': task-2926032, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.468302} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.410261] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1043.410555] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1043.410689] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1043.410881] env[69994]: INFO nova.compute.manager [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1043.411152] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1043.415382] env[69994]: DEBUG nova.compute.manager [-] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1043.415500] env[69994]: DEBUG nova.network.neutron [-] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1043.426795] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1043.426879] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1043.427021] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1043.427701] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1043.427701] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1043.427701] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1043.427701] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1043.427838] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1043.427992] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1043.429039] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1043.429255] env[69994]: DEBUG nova.virt.hardware [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1043.430920] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1adeb9c6-2853-43bf-b99b-2458baea09b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.439976] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d5888e-d3fc-4d4b-99c0-62de6cb69623 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.518701] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Updating instance_info_cache with network_info: [{"id": "dea09a3f-a839-4d7e-aa69-37bee8855d79", "address": "fa:16:3e:06:8f:79", "network": {"id": "25863de4-2e36-47ad-9bc4-ad079d84eb42", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1499705426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c4d971390cf41fa93029cb4418c8ef8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdea09a3f-a8", "ovs_interfaceid": "dea09a3f-a839-4d7e-aa69-37bee8855d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.587157] env[69994]: DEBUG nova.network.neutron [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Updated VIF entry in instance network info cache for port bc1a6a8e-cea0-48bf-96b9-150002406dfc. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1043.587619] env[69994]: DEBUG nova.network.neutron [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Updating instance_info_cache with network_info: [{"id": "bc1a6a8e-cea0-48bf-96b9-150002406dfc", "address": "fa:16:3e:30:3b:e5", "network": {"id": "25863de4-2e36-47ad-9bc4-ad079d84eb42", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1499705426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c4d971390cf41fa93029cb4418c8ef8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc1a6a8e-ce", "ovs_interfaceid": "bc1a6a8e-cea0-48bf-96b9-150002406dfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.630229] env[69994]: DEBUG nova.network.neutron [req-8d75cf41-5fe8-4f79-b9a2-dd65b59be1d0 req-49f2525c-4282-41f1-8379-1d06a936c61e service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Updated VIF entry in instance network info cache for port 3d8fb179-d40f-4e18-8089-07f61c108080. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1043.630492] env[69994]: DEBUG nova.network.neutron [req-8d75cf41-5fe8-4f79-b9a2-dd65b59be1d0 req-49f2525c-4282-41f1-8379-1d06a936c61e service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Updating instance_info_cache with network_info: [{"id": "3d8fb179-d40f-4e18-8089-07f61c108080", "address": "fa:16:3e:a9:49:3e", "network": {"id": "132b6a5f-2a69-4b91-a418-959f72df76fd", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1585728291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef2aa3bc994a479e838e89fa7058ad64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d8fb179-d4", "ovs_interfaceid": "3d8fb179-d40f-4e18-8089-07f61c108080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.691865] env[69994]: DEBUG oslo_vmware.api [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926029, 'name': PowerOnVM_Task, 'duration_secs': 0.746827} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.692188] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1043.692400] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3f43bd0e-e9e5-444f-9e97-130034b10346 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating instance '5b9648a7-f26f-4151-be5c-59991035a529' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1043.719712] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926030, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.578875} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.719986] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 4d415c4d-54b2-4324-8e98-9dc476960348/4d415c4d-54b2-4324-8e98-9dc476960348.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1043.720222] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1043.720485] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1febadaf-1b4e-4405-8bf4-9da1ba3287c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.727463] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1043.727463] env[69994]: value = "task-2926033" [ 1043.727463] env[69994]: _type = "Task" [ 1043.727463] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.736466] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926033, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.862324] env[69994]: DEBUG nova.compute.manager [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Received event network-vif-plugged-dea09a3f-a839-4d7e-aa69-37bee8855d79 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.862543] env[69994]: DEBUG oslo_concurrency.lockutils [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] Acquiring lock "6c81eb8b-78d7-469d-8076-13d8a8f61fec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.862750] env[69994]: DEBUG oslo_concurrency.lockutils [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] Lock "6c81eb8b-78d7-469d-8076-13d8a8f61fec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.862917] env[69994]: DEBUG oslo_concurrency.lockutils [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] Lock "6c81eb8b-78d7-469d-8076-13d8a8f61fec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.863303] env[69994]: DEBUG nova.compute.manager [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] No waiting events found dispatching network-vif-plugged-dea09a3f-a839-4d7e-aa69-37bee8855d79 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1043.863303] env[69994]: WARNING nova.compute.manager [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Received unexpected event network-vif-plugged-dea09a3f-a839-4d7e-aa69-37bee8855d79 for instance with vm_state building and task_state spawning. [ 1043.863418] env[69994]: DEBUG nova.compute.manager [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Received event network-changed-dea09a3f-a839-4d7e-aa69-37bee8855d79 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.863571] env[69994]: DEBUG nova.compute.manager [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Refreshing instance network info cache due to event network-changed-dea09a3f-a839-4d7e-aa69-37bee8855d79. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1043.863736] env[69994]: DEBUG oslo_concurrency.lockutils [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] Acquiring lock "refresh_cache-6c81eb8b-78d7-469d-8076-13d8a8f61fec" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.864183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8943b820-e6f2-4e38-a340-98325aca664f tempest-ServersListShow296Test-331668703 tempest-ServersListShow296Test-331668703-project-member] Lock "1ff25686-e13e-4003-909b-18bf919aa20c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.107s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.973314] env[69994]: DEBUG nova.compute.manager [req-670d51aa-a47c-469b-889c-263712f8ea5a req-c117c34b-d21a-4fe5-8ada-077931c326e3 service nova] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Received event network-vif-plugged-200e0161-2f5b-4939-90ae-3eb3457ffac7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.973525] env[69994]: DEBUG oslo_concurrency.lockutils [req-670d51aa-a47c-469b-889c-263712f8ea5a req-c117c34b-d21a-4fe5-8ada-077931c326e3 service nova] Acquiring lock "922799c0-707c-4f4e-a54c-f015eab0a8d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.973731] env[69994]: DEBUG oslo_concurrency.lockutils [req-670d51aa-a47c-469b-889c-263712f8ea5a req-c117c34b-d21a-4fe5-8ada-077931c326e3 service nova] Lock "922799c0-707c-4f4e-a54c-f015eab0a8d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.973899] env[69994]: DEBUG oslo_concurrency.lockutils [req-670d51aa-a47c-469b-889c-263712f8ea5a req-c117c34b-d21a-4fe5-8ada-077931c326e3 service nova] Lock "922799c0-707c-4f4e-a54c-f015eab0a8d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.974196] env[69994]: DEBUG nova.compute.manager [req-670d51aa-a47c-469b-889c-263712f8ea5a req-c117c34b-d21a-4fe5-8ada-077931c326e3 service nova] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] No waiting events found dispatching network-vif-plugged-200e0161-2f5b-4939-90ae-3eb3457ffac7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1043.974404] env[69994]: WARNING nova.compute.manager [req-670d51aa-a47c-469b-889c-263712f8ea5a req-c117c34b-d21a-4fe5-8ada-077931c326e3 service nova] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Received unexpected event network-vif-plugged-200e0161-2f5b-4939-90ae-3eb3457ffac7 for instance with vm_state building and task_state spawning. 
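
[Editor's note, not part of the log] The topology entries earlier in this window ("Flavor limits 0:0:0", "Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") record a simple enumeration: with no flavor or image constraints the limits fall back to the 65536 ceiling, and for a 1-vCPU flavor the only candidate topology is sockets=1, cores=1, threads=1. The sketch below is an illustrative stand-in for that enumeration only; it is not nova/virt/hardware.py, and the names possible_topologies/MAX are invented for this example.

# Illustrative sketch only -- mimics the "Build topologies for N vcpu(s)" step:
# enumerate (sockets, cores, threads) combinations whose product is the vCPU
# count, bounded by the (here default) limits.
from dataclasses import dataclass
from itertools import product

MAX = 65536  # the default ceiling logged as "limits were sockets=65536, cores=65536, threads=65536"

@dataclass(frozen=True)
class VirtCPUTopology:
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus, max_sockets=MAX, max_cores=MAX, max_threads=MAX):
    """Yield every topology whose sockets*cores*threads equals the vCPU count."""
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- the single topology the log reports
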
[ 1043.974853] env[69994]: DEBUG nova.compute.manager [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1043.975974] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ca4c91-3954-4efd-8b83-cb73acbdb856 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.023549] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Releasing lock "refresh_cache-6c81eb8b-78d7-469d-8076-13d8a8f61fec" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.023979] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Instance network_info: |[{"id": "dea09a3f-a839-4d7e-aa69-37bee8855d79", "address": "fa:16:3e:06:8f:79", "network": {"id": "25863de4-2e36-47ad-9bc4-ad079d84eb42", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1499705426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c4d971390cf41fa93029cb4418c8ef8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdea09a3f-a8", "ovs_interfaceid": "dea09a3f-a839-4d7e-aa69-37bee8855d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1044.024330] env[69994]: DEBUG oslo_concurrency.lockutils [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] Acquired lock "refresh_cache-6c81eb8b-78d7-469d-8076-13d8a8f61fec" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.024550] env[69994]: DEBUG nova.network.neutron [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Refreshing network info cache for port dea09a3f-a839-4d7e-aa69-37bee8855d79 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1044.025940] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:06:8f:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac7039c0-3374-4c08-87fc-af2449b48b02', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dea09a3f-a839-4d7e-aa69-37bee8855d79', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.038355] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1044.045379] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1044.045379] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ec83a9a-889d-4420-b043-8a5d4928b278 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.072558] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1044.072558] env[69994]: value = "task-2926034" [ 1044.072558] env[69994]: _type = "Task" [ 1044.072558] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.082253] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926034, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.090450] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e18aa98-cc8a-4425-8c8e-51be06f32225 req-a1181c1e-3a15-41ef-ba4c-a6e757d801bc service nova] Releasing lock "refresh_cache-4d415c4d-54b2-4324-8e98-9dc476960348" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.137504] env[69994]: DEBUG oslo_concurrency.lockutils [req-8d75cf41-5fe8-4f79-b9a2-dd65b59be1d0 req-49f2525c-4282-41f1-8379-1d06a936c61e service nova] Releasing lock "refresh_cache-e1c00159-d198-4858-b5a3-aa05152b1fda" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.160643] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Successfully updated port: 200e0161-2f5b-4939-90ae-3eb3457ffac7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1044.238031] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926033, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08267} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.238402] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1044.239341] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfcd7dab-076e-46fe-931d-17b56d29271a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.261796] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 4d415c4d-54b2-4324-8e98-9dc476960348/4d415c4d-54b2-4324-8e98-9dc476960348.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1044.264418] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0285de8b-4cfd-4a1d-a5f1-d69d0b12a7da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.286597] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1044.286597] env[69994]: value = "task-2926035" [ 1044.286597] env[69994]: _type = "Task" [ 1044.286597] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.295192] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926035, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.343159] env[69994]: DEBUG nova.network.neutron [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Updated VIF entry in instance network info cache for port dea09a3f-a839-4d7e-aa69-37bee8855d79. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1044.343562] env[69994]: DEBUG nova.network.neutron [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Updating instance_info_cache with network_info: [{"id": "dea09a3f-a839-4d7e-aa69-37bee8855d79", "address": "fa:16:3e:06:8f:79", "network": {"id": "25863de4-2e36-47ad-9bc4-ad079d84eb42", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1499705426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c4d971390cf41fa93029cb4418c8ef8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdea09a3f-a8", "ovs_interfaceid": "dea09a3f-a839-4d7e-aa69-37bee8855d79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.491823] env[69994]: INFO nova.compute.manager [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] instance snapshotting [ 1044.492571] env[69994]: DEBUG nova.objects.instance [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'flavor' on Instance uuid f0b77732-aae1-4790-a2c7-75586e78eda6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.587106] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926034, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.666644] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "refresh_cache-922799c0-707c-4f4e-a54c-f015eab0a8d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.667127] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquired lock "refresh_cache-922799c0-707c-4f4e-a54c-f015eab0a8d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.667127] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1044.684266] env[69994]: DEBUG nova.network.neutron [-] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.686182] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2825e88c-f66c-46be-af93-8ebb66f53517 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.699114] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3528d0c-987f-4385-91c0-878e942163f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.735124] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c420643-ba7e-4ed0-a043-e3e6d0c6d6c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.743022] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f5c727-0c53-4018-9c2a-3f6b047f33af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.756243] env[69994]: DEBUG nova.compute.provider_tree [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.797110] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926035, 'name': ReconfigVM_Task, 'duration_secs': 0.344783} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.797110] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 4d415c4d-54b2-4324-8e98-9dc476960348/4d415c4d-54b2-4324-8e98-9dc476960348.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1044.798674] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ff6adee-573a-4335-85ad-dc905e62b3d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.805701] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1044.805701] env[69994]: value = "task-2926036" [ 1044.805701] env[69994]: _type = "Task" [ 1044.805701] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.813115] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926036, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.846671] env[69994]: DEBUG oslo_concurrency.lockutils [req-ed746ea0-17c4-4122-ad75-a39b6701e5a5 req-1922355a-66d8-4d0c-b9b8-6285d6d0db19 service nova] Releasing lock "refresh_cache-6c81eb8b-78d7-469d-8076-13d8a8f61fec" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.001301] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4016612-4437-47cd-8209-34c93aae7f87 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.022320] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32853102-af17-4660-bc53-4a7c5a551384 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.084240] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926034, 'name': CreateVM_Task, 'duration_secs': 0.597811} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.084425] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1045.085116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.085289] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.085608] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1045.085861] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdaf219e-b6cb-498b-b3c8-3dd3c590c502 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.090719] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1045.090719] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c13419-4247-d9d0-435f-cd0b6887dcbe" [ 1045.090719] env[69994]: _type = "Task" [ 1045.090719] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.098327] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c13419-4247-d9d0-435f-cd0b6887dcbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.193181] env[69994]: INFO nova.compute.manager [-] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Took 1.78 seconds to deallocate network for instance. [ 1045.199301] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1045.259476] env[69994]: DEBUG nova.scheduler.client.report [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1045.316406] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926036, 'name': Rename_Task, 'duration_secs': 0.14529} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.316406] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1045.316512] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1276fdcc-d4d9-40a8-945e-6c2838d73fc0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.324248] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1045.324248] env[69994]: value = "task-2926037" [ 1045.324248] env[69994]: _type = "Task" [ 1045.324248] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.336126] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926037, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.341539] env[69994]: DEBUG nova.network.neutron [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Updating instance_info_cache with network_info: [{"id": "200e0161-2f5b-4939-90ae-3eb3457ffac7", "address": "fa:16:3e:07:23:94", "network": {"id": "25863de4-2e36-47ad-9bc4-ad079d84eb42", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1499705426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c4d971390cf41fa93029cb4418c8ef8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap200e0161-2f", "ovs_interfaceid": "200e0161-2f5b-4939-90ae-3eb3457ffac7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.533342] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1045.533776] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-55855606-3db5-40c7-8b43-d43497e4d36b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.542340] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1045.542340] env[69994]: value = "task-2926038" [ 1045.542340] env[69994]: _type = "Task" [ 1045.542340] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.551655] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926038, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.601117] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c13419-4247-d9d0-435f-cd0b6887dcbe, 'name': SearchDatastore_Task, 'duration_secs': 0.022645} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.601443] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.601682] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1045.601916] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.602074] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.602256] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1045.602535] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-351b65aa-a155-41fd-bbb8-99e14120d5c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.610923] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1045.611117] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1045.611837] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0437e9ae-b918-4269-803c-ed43e4f12bac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.617436] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1045.617436] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a11914-ea0b-be09-79b8-14c0517fa100" [ 1045.617436] env[69994]: _type = "Task" [ 1045.617436] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.625246] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a11914-ea0b-be09-79b8-14c0517fa100, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.700072] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.765541] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.450s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.765973] env[69994]: DEBUG nova.compute.manager [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1045.769968] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.002s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.772090] env[69994]: INFO nova.compute.claims [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.834623] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926037, 'name': PowerOnVM_Task, 'duration_secs': 0.484517} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.834918] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1045.835149] env[69994]: INFO nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Took 7.36 seconds to spawn the instance on the hypervisor. 
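
[Editor's note, not part of the log] The recurring "Waiting for the task: (returnval){ value = "task-..." }", "progress is N%", and "completed successfully" entries (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, CreateVM_Task, Rename_Task, PowerOnVM_Task) all come from the same pattern: the driver submits a vCenter task and polls it until it finishes. The snippet below is a generic, self-contained poll loop illustrating that pattern; it is not oslo.vmware's wait_for_task, and get_task_state is a hypothetical callable standing in for the vCenter property query.

# Illustrative poll-until-done loop; NOT the oslo.vmware implementation.
import time

def wait_for_task(get_task_state, interval=0.5, timeout=300.0):
    """Poll a task until it succeeds; raise on error or timeout."""
    start = time.monotonic()
    while True:
        state = get_task_state()          # e.g. {"state": "running", "progress": 40}
        if state["state"] == "success":
            return state.get("result")
        if state["state"] == "error":
            raise RuntimeError(state.get("error", "task failed"))
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in time")
        time.sleep(interval)              # successive "progress is N%" log entries are one poll apart
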
[ 1045.835337] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1045.836165] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a1cb81-07dd-4af2-b3bd-e6b855739313 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.845023] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Releasing lock "refresh_cache-922799c0-707c-4f4e-a54c-f015eab0a8d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.845207] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Instance network_info: |[{"id": "200e0161-2f5b-4939-90ae-3eb3457ffac7", "address": "fa:16:3e:07:23:94", "network": {"id": "25863de4-2e36-47ad-9bc4-ad079d84eb42", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1499705426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c4d971390cf41fa93029cb4418c8ef8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap200e0161-2f", "ovs_interfaceid": "200e0161-2f5b-4939-90ae-3eb3457ffac7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1045.845493] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:23:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac7039c0-3374-4c08-87fc-af2449b48b02', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '200e0161-2f5b-4939-90ae-3eb3457ffac7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1045.854198] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1045.854682] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1045.854922] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e9badd0-6c13-4d53-9afc-6c1d71d1e911 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.883685] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1045.883685] env[69994]: value = "task-2926039" [ 1045.883685] env[69994]: _type = "Task" [ 1045.883685] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.895126] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926039, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.057059] env[69994]: DEBUG nova.compute.manager [req-f1b9083e-40e4-4061-8a6b-3f1b2a8b614f req-6ccf71ca-01c4-4ad8-8775-db0340302f0c service nova] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Received event network-changed-200e0161-2f5b-4939-90ae-3eb3457ffac7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1046.057334] env[69994]: DEBUG nova.compute.manager [req-f1b9083e-40e4-4061-8a6b-3f1b2a8b614f req-6ccf71ca-01c4-4ad8-8775-db0340302f0c service nova] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Refreshing instance network info cache due to event network-changed-200e0161-2f5b-4939-90ae-3eb3457ffac7. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1046.057570] env[69994]: DEBUG oslo_concurrency.lockutils [req-f1b9083e-40e4-4061-8a6b-3f1b2a8b614f req-6ccf71ca-01c4-4ad8-8775-db0340302f0c service nova] Acquiring lock "refresh_cache-922799c0-707c-4f4e-a54c-f015eab0a8d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.057755] env[69994]: DEBUG oslo_concurrency.lockutils [req-f1b9083e-40e4-4061-8a6b-3f1b2a8b614f req-6ccf71ca-01c4-4ad8-8775-db0340302f0c service nova] Acquired lock "refresh_cache-922799c0-707c-4f4e-a54c-f015eab0a8d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.057956] env[69994]: DEBUG nova.network.neutron [req-f1b9083e-40e4-4061-8a6b-3f1b2a8b614f req-6ccf71ca-01c4-4ad8-8775-db0340302f0c service nova] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Refreshing network info cache for port 200e0161-2f5b-4939-90ae-3eb3457ffac7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1046.059707] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926038, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.127927] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a11914-ea0b-be09-79b8-14c0517fa100, 'name': SearchDatastore_Task, 'duration_secs': 0.009703} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.128773] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52df05a6-4715-466b-992e-91708eb0c833 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.134876] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1046.134876] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5236e92d-e42d-f509-ec14-a4d1e6092bd6" [ 1046.134876] env[69994]: _type = "Task" [ 1046.134876] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.142726] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5236e92d-e42d-f509-ec14-a4d1e6092bd6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.276590] env[69994]: DEBUG nova.compute.utils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1046.280022] env[69994]: DEBUG nova.compute.manager [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1046.280022] env[69994]: DEBUG nova.network.neutron [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1046.324479] env[69994]: DEBUG nova.policy [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6bcafd04d09f45fab9d573d11d01dfbf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c324e22a0046460b9ad3ad8578f7ef6f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1046.362556] env[69994]: INFO nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Took 13.49 seconds to build instance. [ 1046.380260] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "5b9648a7-f26f-4151-be5c-59991035a529" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.380451] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "5b9648a7-f26f-4151-be5c-59991035a529" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.380706] env[69994]: DEBUG nova.compute.manager [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Going to confirm migration 3 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1046.394033] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926039, 'name': CreateVM_Task, 'duration_secs': 0.377787} completed successfully. 
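Aside: the policy entry above shows the network:attach_external_network check failing for a member/reader token. A hedged sketch of how such a rule evaluates against a credentials dict with oslo.policy; the rule string 'role:admin' used here is an assumption for illustration, not necessarily Nova's shipped default.

```python
# Illustrative oslo.policy check resembling the failed
# "network:attach_external_network" authorization logged above.
# The 'role:admin' rule string is an assumption for the example.
from oslo_config import cfg
from oslo_policy import policy

CONF = cfg.CONF
CONF([], project='example')

enforcer = policy.Enforcer(CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'], 'is_admin': False,
         'user_id': '6bcafd04d09f45fab9d573d11d01dfbf',
         'project_id': 'c324e22a0046460b9ad3ad8578f7ef6f'}

# Returns False for a member/reader token, which the compute manager
# logs as "Policy check ... failed with credentials {...}".
print(enforcer.enforce('network:attach_external_network', {}, creds))
```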
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.394204] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1046.394765] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.394938] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.395587] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1046.395587] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60d0c6e5-82b3-47a7-a02d-915db169d925 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.400431] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1046.400431] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527c09b2-c3ec-c3ad-0b3b-d38fa07219d7" [ 1046.400431] env[69994]: _type = "Task" [ 1046.400431] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.409700] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527c09b2-c3ec-c3ad-0b3b-d38fa07219d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.555447] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926038, 'name': CreateSnapshot_Task, 'duration_secs': 0.584173} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.555745] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1046.556683] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1b1d09-b8e4-43b4-bfb7-c20d014c1f05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.588162] env[69994]: DEBUG nova.network.neutron [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Successfully created port: ab89b870-7c43-45dd-878e-c1f922fc3ee4 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1046.648369] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5236e92d-e42d-f509-ec14-a4d1e6092bd6, 'name': SearchDatastore_Task, 'duration_secs': 0.023116} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.649251] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.649251] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 6c81eb8b-78d7-469d-8076-13d8a8f61fec/6c81eb8b-78d7-469d-8076-13d8a8f61fec.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1046.649503] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50942f32-7aee-481b-a644-1f314970858f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.655203] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1046.655203] env[69994]: value = "task-2926040" [ 1046.655203] env[69994]: _type = "Task" [ 1046.655203] env[69994]: } to complete. 
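Aside: the copy above moves the cached image VMDK into the instance directory via VirtualDiskManager.CopyVirtualDisk_Task and then waits for task-2926040. A sketch of that call is below; it assumes an existing oslo.vmware session and a Datacenter reference, and the datastore paths are taken from the log entry.

```python
# Sketch of the datastore-to-datastore VMDK copy logged above.
# `session` is an existing VMwareAPISession and `dc_ref` a Datacenter
# managed-object reference; both are assumed here.
def copy_cached_image(session, dc_ref):
    src = ('[datastore1] devstack-image-cache_base/'
           'f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/'
           'f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk')
    dst = ('[datastore1] 6c81eb8b-78d7-469d-8076-13d8a8f61fec/'
           '6c81eb8b-78d7-469d-8076-13d8a8f61fec.vmdk')
    disk_mgr = session.vim.service_content.virtualDiskManager
    task_ref = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  disk_mgr,
                                  sourceName=src, sourceDatacenter=dc_ref,
                                  destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(task_ref)
```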
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.663639] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926040, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.770605] env[69994]: DEBUG nova.network.neutron [req-f1b9083e-40e4-4061-8a6b-3f1b2a8b614f req-6ccf71ca-01c4-4ad8-8775-db0340302f0c service nova] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Updated VIF entry in instance network info cache for port 200e0161-2f5b-4939-90ae-3eb3457ffac7. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1046.770983] env[69994]: DEBUG nova.network.neutron [req-f1b9083e-40e4-4061-8a6b-3f1b2a8b614f req-6ccf71ca-01c4-4ad8-8775-db0340302f0c service nova] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Updating instance_info_cache with network_info: [{"id": "200e0161-2f5b-4939-90ae-3eb3457ffac7", "address": "fa:16:3e:07:23:94", "network": {"id": "25863de4-2e36-47ad-9bc4-ad079d84eb42", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1499705426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c4d971390cf41fa93029cb4418c8ef8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap200e0161-2f", "ovs_interfaceid": "200e0161-2f5b-4939-90ae-3eb3457ffac7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.783034] env[69994]: DEBUG nova.compute.manager [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Start building block device mappings for instance. 
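Aside: the refreshed instance_info_cache entry above is a list of VIF dicts. A small sketch of pulling out the fields consumers usually need (port id, MAC, device name, fixed IPs); the literal below is abbreviated from the cache entry in the log.

```python
# Picking the commonly used fields out of an instance_info_cache entry
# like the one refreshed above. The literal is abbreviated from the log.
network_info = [{
    "id": "200e0161-2f5b-4939-90ae-3eb3457ffac7",
    "address": "fa:16:3e:07:23:94",
    "network": {
        "id": "25863de4-2e36-47ad-9bc4-ad079d84eb42",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4}],
        }],
    },
    "type": "ovs",
    "devname": "tap200e0161-2f",
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)
# -> 200e0161-... fa:16:3e:07:23:94 tap200e0161-2f ['192.168.128.9']
```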
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1046.868524] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "4d415c4d-54b2-4324-8e98-9dc476960348" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.012s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.912493] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527c09b2-c3ec-c3ad-0b3b-d38fa07219d7, 'name': SearchDatastore_Task, 'duration_secs': 0.025807} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.914768] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.914998] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1046.915258] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.915400] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.915643] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1046.916319] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50c830ba-e9ba-41f6-9e44-a6af83cbee06 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.925676] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 
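Aside: the surrounding entries are dominated by oslo.concurrency lock traffic, per-instance build locks and image-cache locks being acquired and released with "waited"/"held" timings (the build lock above was held 15.012s). A sketch of the two generic usage patterns, decorator and context manager, with lock names mirroring the log; these are the plain oslo.concurrency primitives, not Nova's wrappers.

```python
# The two oslo.concurrency locking patterns behind the
# "Acquiring lock ... / acquired ... waited / released ... held" entries.
from oslo_concurrency import lockutils

synchronized = lockutils.synchronized_with_prefix('nova-')


@synchronized('4d415c4d-54b2-4324-8e98-9dc476960348')
def build_and_run_instance():
    # serialized per instance UUID, like _locked_do_build_and_run_instance
    pass


def fetch_image_if_missing():
    # explicit context-manager form, like the image-cache locks above
    with lockutils.lock('[datastore1] devstack-image-cache_base/'
                        'f75f967d-5bd8-4c15-9a52-96f7e9dd9d48'):
        pass


build_and_run_instance()
fetch_image_if_missing()
```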
tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1046.925873] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1046.926594] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6dc37ef-24d3-4a3f-95b1-a6f218540e88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.935426] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1046.935426] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529e3ae7-ef40-37d2-e172-03ec26160044" [ 1046.935426] env[69994]: _type = "Task" [ 1046.935426] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.942960] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529e3ae7-ef40-37d2-e172-03ec26160044, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.955795] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.955974] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.956170] env[69994]: DEBUG nova.network.neutron [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1046.956360] env[69994]: DEBUG nova.objects.instance [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lazy-loading 'info_cache' on Instance uuid 5b9648a7-f26f-4151-be5c-59991035a529 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.075456] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1047.078166] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-edeb309c-fcc5-41fc-ac45-1c508ba2b124 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.085805] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1047.085805] env[69994]: value = "task-2926041" [ 1047.085805] env[69994]: _type = "Task" [ 1047.085805] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.092525] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0210f1f3-66b2-4102-81c6-5ac5b51c3c13 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.099038] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926041, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.103698] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d614737-b188-4ac1-a0ab-c152dddac749 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.136184] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd609de-f8be-45d6-93d0-5f68dd0a4c7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.144227] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8352e1a-faf3-4f8d-80a4-975739cb5270 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.158496] env[69994]: DEBUG nova.compute.provider_tree [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.168759] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926040, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.274646] env[69994]: DEBUG oslo_concurrency.lockutils [req-f1b9083e-40e4-4061-8a6b-3f1b2a8b614f req-6ccf71ca-01c4-4ad8-8775-db0340302f0c service nova] Releasing lock "refresh_cache-922799c0-707c-4f4e-a54c-f015eab0a8d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.274646] env[69994]: DEBUG nova.compute.manager [req-f1b9083e-40e4-4061-8a6b-3f1b2a8b614f req-6ccf71ca-01c4-4ad8-8775-db0340302f0c service nova] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Received event network-vif-deleted-3d8fb179-d40f-4e18-8089-07f61c108080 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1047.448026] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529e3ae7-ef40-37d2-e172-03ec26160044, 'name': SearchDatastore_Task, 'duration_secs': 0.028765} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.448427] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48cbfd35-a664-4a33-8302-d9cc245b7b15 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.454587] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1047.454587] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5273b10e-38fb-e08e-548b-b7c2856ec5fa" [ 1047.454587] env[69994]: _type = "Task" [ 1047.454587] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.466506] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5273b10e-38fb-e08e-548b-b7c2856ec5fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.595574] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926041, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.664663] env[69994]: DEBUG nova.scheduler.client.report [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1047.671131] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926040, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.851185} completed successfully. 
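Aside: the inventory reported above for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be carries, per resource class, a total, a reserved amount and an allocation ratio; the schedulable capacity placement works with is (total − reserved) × allocation_ratio. A short worked computation on the exact figures from the log:

```python
# Effective schedulable capacity implied by the inventory logged above:
# capacity = (total - reserved) * allocation_ratio per resource class.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```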
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.671611] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 6c81eb8b-78d7-469d-8076-13d8a8f61fec/6c81eb8b-78d7-469d-8076-13d8a8f61fec.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1047.671834] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1047.672094] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54abedaf-70a1-4a15-aa17-2f2b6490cfc9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.678415] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1047.678415] env[69994]: value = "task-2926042" [ 1047.678415] env[69994]: _type = "Task" [ 1047.678415] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.686937] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926042, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.796926] env[69994]: DEBUG nova.compute.manager [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Start spawning the instance on the hypervisor. 
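Aside: "Extending root virtual disk to 1048576" above is expressed in KiB (ExtendVirtualDisk_Task takes a capacity in KB): the flavor used for these instances, m1.nano with root_gb=1 in the next entry, has a 1 GiB root disk, and 1 GiB = 1024 × 1024 KiB = 1 048 576 KiB.

```python
# The extend target logged above, derived from the flavor's root_gb.
root_gb = 1                              # m1.nano root disk size
new_capacity_kb = root_gb * 1024 * 1024  # 1048576, as logged
print(new_capacity_kb)
```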
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1047.819927] env[69994]: DEBUG nova.virt.hardware [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1047.820215] env[69994]: DEBUG nova.virt.hardware [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1047.820386] env[69994]: DEBUG nova.virt.hardware [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1047.820571] env[69994]: DEBUG nova.virt.hardware [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1047.820718] env[69994]: DEBUG nova.virt.hardware [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1047.820865] env[69994]: DEBUG nova.virt.hardware [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1047.821078] env[69994]: DEBUG nova.virt.hardware [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1047.821261] env[69994]: DEBUG nova.virt.hardware [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1047.821430] env[69994]: DEBUG 
nova.virt.hardware [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1047.821592] env[69994]: DEBUG nova.virt.hardware [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1047.821763] env[69994]: DEBUG nova.virt.hardware [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1047.822662] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f33f19-830f-4480-8215-a3ed64b0df8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.830471] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51abb7e0-21d4-4e98-9bda-cd792a460018 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.966783] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5273b10e-38fb-e08e-548b-b7c2856ec5fa, 'name': SearchDatastore_Task, 'duration_secs': 0.035971} completed successfully. 
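Aside: the hardware entries above walk through topology selection for this flavor: no limits or preferences from flavor or image (0:0:0), a ceiling of 65536 sockets/cores/threads, and 1 vCPU, which yields exactly one possible topology, 1 socket × 1 core × 1 thread. A small sketch of that enumeration, listing every (sockets, cores, threads) factorization of the vCPU count under the limits; this illustrates the arithmetic only and is not Nova's implementation.

```python
# Enumerate (sockets, cores, threads) factorizations of a vCPU count
# under upper limits; an illustration of the selection logged above,
# not Nova's actual implementation.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies


print(possible_topologies(1))   # [(1, 1, 1)], the single topology logged
print(possible_topologies(4))   # e.g. (1, 4, 1), (2, 2, 1), (4, 1, 1), ...
```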
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.969362] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.969630] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 922799c0-707c-4f4e-a54c-f015eab0a8d7/922799c0-707c-4f4e-a54c-f015eab0a8d7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1047.969953] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b97ae9f-2885-41d6-bbd4-bb88706d13d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.978218] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1047.978218] env[69994]: value = "task-2926043" [ 1047.978218] env[69994]: _type = "Task" [ 1047.978218] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.987418] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926043, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.012103] env[69994]: DEBUG nova.compute.manager [req-21cbc087-d86a-4329-9b01-e9b5c2854cd1 req-db6ab21d-33df-41ac-a49f-3f156d2bad12 service nova] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Received event network-vif-plugged-ab89b870-7c43-45dd-878e-c1f922fc3ee4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1048.013152] env[69994]: DEBUG oslo_concurrency.lockutils [req-21cbc087-d86a-4329-9b01-e9b5c2854cd1 req-db6ab21d-33df-41ac-a49f-3f156d2bad12 service nova] Acquiring lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.013152] env[69994]: DEBUG oslo_concurrency.lockutils [req-21cbc087-d86a-4329-9b01-e9b5c2854cd1 req-db6ab21d-33df-41ac-a49f-3f156d2bad12 service nova] Lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.013339] env[69994]: DEBUG oslo_concurrency.lockutils [req-21cbc087-d86a-4329-9b01-e9b5c2854cd1 req-db6ab21d-33df-41ac-a49f-3f156d2bad12 service nova] Lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.013611] env[69994]: DEBUG nova.compute.manager [req-21cbc087-d86a-4329-9b01-e9b5c2854cd1 req-db6ab21d-33df-41ac-a49f-3f156d2bad12 service nova] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] No waiting events found dispatching network-vif-plugged-ab89b870-7c43-45dd-878e-c1f922fc3ee4 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1048.014311] env[69994]: WARNING nova.compute.manager [req-21cbc087-d86a-4329-9b01-e9b5c2854cd1 req-db6ab21d-33df-41ac-a49f-3f156d2bad12 service nova] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Received unexpected event network-vif-plugged-ab89b870-7c43-45dd-878e-c1f922fc3ee4 for instance with vm_state building and task_state spawning. [ 1048.098834] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926041, 'name': CloneVM_Task} progress is 94%. 
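Aside: the sequence above is the neutron-to-nova external-event path: compute receives network-vif-plugged for port ab89b870-..., looks for a waiter registered for that instance, finds none ("No waiting events found dispatching ..."), and logs the WARNING about an unexpected event. A conceptual sketch of that prepare/wait/pop handshake using a plain threading.Event; this mirrors the idea only, not Nova's InstanceEvents implementation.

```python
# Conceptual prepare/wait/pop handshake behind the "network-vif-plugged"
# entries above; idea only, not Nova's InstanceEvents implementation.
import threading

_waiters = {}   # (instance_uuid, event_name) -> threading.Event
_lock = threading.Lock()


def prepare_for_event(instance_uuid, event_name):
    ev = threading.Event()
    with _lock:
        _waiters[(instance_uuid, event_name)] = ev
    return ev


def dispatch_event(instance_uuid, event_name):
    with _lock:
        ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
    else:
        ev.set()


# A spawning thread would call prepare_for_event() before plugging the VIF
# and then ev.wait(timeout=...); here no waiter was registered, so the
# dispatch prints the same kind of warning the compute manager logs above.
dispatch_event('9dbaceb8-fa4d-40c4-9f0e-fa9749663a05',
               'network-vif-plugged-ab89b870-7c43-45dd-878e-c1f922fc3ee4')
```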
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.155997] env[69994]: DEBUG nova.network.neutron [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Successfully updated port: ab89b870-7c43-45dd-878e-c1f922fc3ee4 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1048.172751] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.173489] env[69994]: DEBUG nova.compute.manager [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1048.179803] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.013s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.180560] env[69994]: DEBUG nova.objects.instance [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lazy-loading 'resources' on Instance uuid c98308b3-2431-4f17-9022-bcd9f1e83a35 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.198761] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926042, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135342} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.199116] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1048.199954] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1e2743-91b0-4dd9-994e-9c434e5bc11b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.225289] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 6c81eb8b-78d7-469d-8076-13d8a8f61fec/6c81eb8b-78d7-469d-8076-13d8a8f61fec.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1048.226743] env[69994]: DEBUG nova.network.neutron [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating instance_info_cache with network_info: [{"id": "ad28c14f-638f-4073-b494-cb6a2a579dab", "address": "fa:16:3e:60:ef:9a", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad28c14f-63", "ovs_interfaceid": "ad28c14f-638f-4073-b494-cb6a2a579dab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.230916] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ffbc3a9-3fe7-4eb1-95a9-4c89cc724dc6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.253108] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1048.253108] env[69994]: value = "task-2926044" [ 1048.253108] env[69994]: _type = "Task" [ 1048.253108] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.263139] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926044, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.488168] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926043, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.537020] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0989d980-9dc8-446a-86b4-625685fe2030 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.545403] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912422c8-c4b3-46f1-bdc7-8592f3b9e3ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.580401] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affe545f-bc2b-408c-ba70-525d71196800 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.593536] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc00d9d-5629-45d8-bfd4-123cfae11568 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.603835] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926041, 'name': CloneVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.612861] env[69994]: DEBUG nova.compute.provider_tree [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.658783] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "refresh_cache-9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.658992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "refresh_cache-9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.659162] env[69994]: DEBUG nova.network.neutron [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1048.684758] env[69994]: DEBUG nova.compute.utils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1048.686735] env[69994]: DEBUG nova.compute.manager [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1048.686944] env[69994]: DEBUG nova.network.neutron [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1048.726907] env[69994]: DEBUG nova.policy [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de3fba71299348fab70f6e21e1028bb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0f5bb040f474df19739d5170639ff67', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1048.746650] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "refresh_cache-5b9648a7-f26f-4151-be5c-59991035a529" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.747032] env[69994]: DEBUG nova.objects.instance [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lazy-loading 'migration_context' on Instance uuid 5b9648a7-f26f-4151-be5c-59991035a529 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.763875] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926044, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.987600] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926043, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.726693} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.988072] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 922799c0-707c-4f4e-a54c-f015eab0a8d7/922799c0-707c-4f4e-a54c-f015eab0a8d7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1048.988196] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1048.988380] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-334f4a68-25b6-4f67-8843-15b034fdcf8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.992833] env[69994]: DEBUG nova.network.neutron [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Successfully created port: 2850c5e6-0790-4289-aab5-45fca743e84f {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1048.996398] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1048.996398] env[69994]: value = "task-2926045" [ 1048.996398] env[69994]: _type = "Task" [ 1048.996398] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.007017] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926045, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.100205] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926041, 'name': CloneVM_Task, 'duration_secs': 1.598648} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.100537] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Created linked-clone VM from snapshot [ 1049.101360] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a512f03-6264-4726-9a77-0e041d45089f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.109183] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Uploading image 4ac9f11f-818a-47f3-bec5-a37336d3bae8 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1049.115972] env[69994]: DEBUG nova.scheduler.client.report [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1049.136516] env[69994]: DEBUG oslo_vmware.rw_handles [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1049.136516] env[69994]: value = "vm-587594" [ 1049.136516] env[69994]: _type = "VirtualMachine" [ 1049.136516] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1049.136791] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-29c31ff6-d56c-4911-b01c-b438966abeb5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.145475] env[69994]: DEBUG oslo_vmware.rw_handles [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lease: (returnval){ [ 1049.145475] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525c9e77-aea1-880d-623a-6f15470fd882" [ 1049.145475] env[69994]: _type = "HttpNfcLease" [ 1049.145475] env[69994]: } obtained for exporting VM: (result){ [ 1049.145475] env[69994]: value = "vm-587594" [ 1049.145475] env[69994]: _type = "VirtualMachine" [ 1049.145475] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1049.145915] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the lease: (returnval){ [ 1049.145915] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525c9e77-aea1-880d-623a-6f15470fd882" [ 1049.145915] env[69994]: _type = "HttpNfcLease" [ 1049.145915] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1049.153242] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1049.153242] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525c9e77-aea1-880d-623a-6f15470fd882" [ 1049.153242] env[69994]: _type = "HttpNfcLease" [ 1049.153242] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1049.190665] env[69994]: DEBUG nova.compute.manager [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1049.221140] env[69994]: DEBUG nova.network.neutron [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1049.250229] env[69994]: DEBUG nova.objects.base [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Object Instance<5b9648a7-f26f-4151-be5c-59991035a529> lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1049.251206] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451bc7cd-f39d-4ae0-a871-e23d92d5c36e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.263884] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926044, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.278338] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d5ed944-c2b3-4f07-9f83-3b77558b648b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.284167] env[69994]: DEBUG oslo_vmware.api [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1049.284167] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5200b32a-2bcd-aac1-8fb3-fb823e446776" [ 1049.284167] env[69994]: _type = "Task" [ 1049.284167] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.293141] env[69994]: DEBUG oslo_vmware.api [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5200b32a-2bcd-aac1-8fb3-fb823e446776, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.422954] env[69994]: DEBUG nova.network.neutron [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Updating instance_info_cache with network_info: [{"id": "ab89b870-7c43-45dd-878e-c1f922fc3ee4", "address": "fa:16:3e:0f:05:de", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab89b870-7c", "ovs_interfaceid": "ab89b870-7c43-45dd-878e-c1f922fc3ee4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.506434] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926045, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.327208} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.506736] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1049.507561] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151ef2e3-c7a7-4e4b-a918-d28856500b89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.529746] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 922799c0-707c-4f4e-a54c-f015eab0a8d7/922799c0-707c-4f4e-a54c-f015eab0a8d7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1049.530030] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d0fa917-47f1-4a7a-8da3-37e8675a164e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.549744] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1049.549744] env[69994]: value = "task-2926047" [ 1049.549744] env[69994]: _type = "Task" [ 1049.549744] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.557484] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926047, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.624154] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.444s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.626678] env[69994]: DEBUG oslo_concurrency.lockutils [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.964s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.626945] env[69994]: DEBUG nova.objects.instance [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lazy-loading 'resources' on Instance uuid 5acdf02b-f61c-46ff-9c36-8e86b9be7738 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.646488] env[69994]: INFO nova.scheduler.client.report [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Deleted allocations for instance c98308b3-2431-4f17-9022-bcd9f1e83a35 [ 1049.656905] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1049.656905] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525c9e77-aea1-880d-623a-6f15470fd882" [ 1049.656905] env[69994]: _type = "HttpNfcLease" [ 1049.656905] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1049.657174] env[69994]: DEBUG oslo_vmware.rw_handles [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1049.657174] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525c9e77-aea1-880d-623a-6f15470fd882" [ 1049.657174] env[69994]: _type = "HttpNfcLease" [ 1049.657174] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1049.657895] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6af829-3814-4b27-82b0-14460ae79581 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.665503] env[69994]: DEBUG oslo_vmware.rw_handles [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ef95a4-c7d1-b1b5-8b6d-bb9c2f01a4dc/disk-0.vmdk from lease info. 
{{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1049.665628] env[69994]: DEBUG oslo_vmware.rw_handles [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ef95a4-c7d1-b1b5-8b6d-bb9c2f01a4dc/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1049.766116] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e624f613-ea3c-4c38-8db3-c1dbab34ce8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.769997] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926044, 'name': ReconfigVM_Task, 'duration_secs': 1.107966} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.770354] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 6c81eb8b-78d7-469d-8076-13d8a8f61fec/6c81eb8b-78d7-469d-8076-13d8a8f61fec.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1049.771288] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d7694a9-d524-45ce-a5ff-ca663a53e657 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.777573] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1049.777573] env[69994]: value = "task-2926048" [ 1049.777573] env[69994]: _type = "Task" [ 1049.777573] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.786611] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926048, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.796345] env[69994]: DEBUG oslo_vmware.api [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5200b32a-2bcd-aac1-8fb3-fb823e446776, 'name': SearchDatastore_Task, 'duration_secs': 0.02444} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.801700] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.925962] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "refresh_cache-9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.926313] env[69994]: DEBUG nova.compute.manager [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Instance network_info: |[{"id": "ab89b870-7c43-45dd-878e-c1f922fc3ee4", "address": "fa:16:3e:0f:05:de", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab89b870-7c", "ovs_interfaceid": "ab89b870-7c43-45dd-878e-c1f922fc3ee4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1049.926730] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:05:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab89b870-7c43-45dd-878e-c1f922fc3ee4', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1049.935645] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1049.936944] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1049.938784] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5974fc8-8102-437b-9dfa-28daca67709f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.959357] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1049.959357] env[69994]: value = "task-2926049" [ 1049.959357] env[69994]: _type = "Task" [ 1049.959357] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.968498] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926049, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.042465] env[69994]: DEBUG nova.compute.manager [req-0a1e585b-ca86-4a1a-9752-6d9c5b8ea4fb req-e060c5ac-98ff-4b83-addf-96ff471ca438 service nova] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Received event network-changed-ab89b870-7c43-45dd-878e-c1f922fc3ee4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1050.042735] env[69994]: DEBUG nova.compute.manager [req-0a1e585b-ca86-4a1a-9752-6d9c5b8ea4fb req-e060c5ac-98ff-4b83-addf-96ff471ca438 service nova] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Refreshing instance network info cache due to event network-changed-ab89b870-7c43-45dd-878e-c1f922fc3ee4. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1050.042883] env[69994]: DEBUG oslo_concurrency.lockutils [req-0a1e585b-ca86-4a1a-9752-6d9c5b8ea4fb req-e060c5ac-98ff-4b83-addf-96ff471ca438 service nova] Acquiring lock "refresh_cache-9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.043039] env[69994]: DEBUG oslo_concurrency.lockutils [req-0a1e585b-ca86-4a1a-9752-6d9c5b8ea4fb req-e060c5ac-98ff-4b83-addf-96ff471ca438 service nova] Acquired lock "refresh_cache-9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.043299] env[69994]: DEBUG nova.network.neutron [req-0a1e585b-ca86-4a1a-9752-6d9c5b8ea4fb req-e060c5ac-98ff-4b83-addf-96ff471ca438 service nova] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Refreshing network info cache for port ab89b870-7c43-45dd-878e-c1f922fc3ee4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1050.060257] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926047, 'name': ReconfigVM_Task, 'duration_secs': 0.314786} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.060584] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 922799c0-707c-4f4e-a54c-f015eab0a8d7/922799c0-707c-4f4e-a54c-f015eab0a8d7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1050.061260] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9bce85ed-bca7-43ff-bc29-161ac8f27b8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.067362] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1050.067362] env[69994]: value = "task-2926050" [ 1050.067362] env[69994]: _type = "Task" [ 1050.067362] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.076490] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926050, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.156760] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a503971e-5434-4990-b6bb-fa5b3486e010 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "c98308b3-2431-4f17-9022-bcd9f1e83a35" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.935s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.229249] env[69994]: DEBUG nova.compute.manager [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1050.258826] env[69994]: DEBUG nova.virt.hardware [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1050.259450] env[69994]: DEBUG nova.virt.hardware [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1050.259679] env[69994]: DEBUG nova.virt.hardware [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1050.259874] env[69994]: DEBUG nova.virt.hardware [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1050.260032] env[69994]: DEBUG nova.virt.hardware [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1050.260276] env[69994]: DEBUG nova.virt.hardware [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1050.260496] env[69994]: DEBUG nova.virt.hardware [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1050.260652] env[69994]: DEBUG nova.virt.hardware [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1050.260823] env[69994]: DEBUG nova.virt.hardware [None 
req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1050.261019] env[69994]: DEBUG nova.virt.hardware [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1050.261302] env[69994]: DEBUG nova.virt.hardware [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1050.262263] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d88cc4-a245-4c8d-bc6f-c008c7c20ef0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.274356] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78d462b-9866-4d20-81cc-72fe4fca7aa2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.304116] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926048, 'name': Rename_Task, 'duration_secs': 0.340858} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.304691] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1050.305074] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3cf7cac7-ec50-4b92-86f2-03c6c6cb9a58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.313031] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1050.313031] env[69994]: value = "task-2926051" [ 1050.313031] env[69994]: _type = "Task" [ 1050.313031] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.324021] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926051, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.453520] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adcb2be3-8f47-4fad-96e5-0644e0d57b29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.467777] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64ff7e9-9481-498c-b61f-38ef3872b173 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.476471] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926049, 'name': CreateVM_Task, 'duration_secs': 0.383238} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.502919] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1050.504709] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.504709] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.505202] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1050.505997] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e652b9e-56b2-4679-b324-75742f15c7c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.509025] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96e4e63b-da01-437e-bee9-8f5417be8f3f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.518740] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7ccf7c-35cb-4604-981c-4038f63cdd87 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.523838] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1050.523838] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529d8c58-c53e-5c42-5448-68cca4ad7838" [ 1050.523838] env[69994]: 
_type = "Task" [ 1050.523838] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.537523] env[69994]: DEBUG nova.compute.provider_tree [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.548572] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529d8c58-c53e-5c42-5448-68cca4ad7838, 'name': SearchDatastore_Task, 'duration_secs': 0.01952} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.549063] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.549367] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1050.549747] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.550356] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.550356] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1050.550882] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90e8d21d-820b-43c2-a7cf-e249f4ebfc8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.560095] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1050.560857] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1050.561771] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13bfcd83-dc6a-4f5c-8165-45aa12df1310 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.573196] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1050.573196] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d3039c-ca12-260a-7e76-053c04d0664a" [ 1050.573196] env[69994]: _type = "Task" [ 1050.573196] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.581456] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926050, 'name': Rename_Task, 'duration_secs': 0.169302} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.582511] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1050.582964] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-043ded69-5e85-4ba6-a29c-a81ca1a14059 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.589823] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d3039c-ca12-260a-7e76-053c04d0664a, 'name': SearchDatastore_Task, 'duration_secs': 0.01287} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.591516] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a441f8b-2900-4e3b-8cd1-274e41228669 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.599773] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1050.599773] env[69994]: value = "task-2926052" [ 1050.599773] env[69994]: _type = "Task" [ 1050.599773] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.607120] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1050.607120] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529f55c7-c1ed-80ce-452d-5d30deb09149" [ 1050.607120] env[69994]: _type = "Task" [ 1050.607120] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.621895] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926052, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.631257] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529f55c7-c1ed-80ce-452d-5d30deb09149, 'name': SearchDatastore_Task, 'duration_secs': 0.014601} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.632472] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.632909] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05/9dbaceb8-fa4d-40c4-9f0e-fa9749663a05.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1050.633411] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e81face1-6dac-446b-8767-5f4bfcdfb14c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.641506] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1050.641506] env[69994]: value = "task-2926053" [ 1050.641506] env[69994]: _type = "Task" [ 1050.641506] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.654706] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926053, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.657850] env[69994]: DEBUG nova.compute.manager [req-7951feb0-e609-423e-889d-888fba79c60a req-5a08f5ab-561f-42b1-8893-9e05fb78f656 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Received event network-vif-plugged-2850c5e6-0790-4289-aab5-45fca743e84f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1050.658275] env[69994]: DEBUG oslo_concurrency.lockutils [req-7951feb0-e609-423e-889d-888fba79c60a req-5a08f5ab-561f-42b1-8893-9e05fb78f656 service nova] Acquiring lock "63e1c67b-6a79-4c09-a835-4ff11e15e981-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.658639] env[69994]: DEBUG oslo_concurrency.lockutils [req-7951feb0-e609-423e-889d-888fba79c60a req-5a08f5ab-561f-42b1-8893-9e05fb78f656 service nova] Lock "63e1c67b-6a79-4c09-a835-4ff11e15e981-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.658937] env[69994]: DEBUG oslo_concurrency.lockutils [req-7951feb0-e609-423e-889d-888fba79c60a req-5a08f5ab-561f-42b1-8893-9e05fb78f656 service nova] Lock "63e1c67b-6a79-4c09-a835-4ff11e15e981-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.659258] env[69994]: DEBUG nova.compute.manager [req-7951feb0-e609-423e-889d-888fba79c60a req-5a08f5ab-561f-42b1-8893-9e05fb78f656 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] No waiting events found dispatching network-vif-plugged-2850c5e6-0790-4289-aab5-45fca743e84f {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1050.659598] env[69994]: WARNING nova.compute.manager [req-7951feb0-e609-423e-889d-888fba79c60a req-5a08f5ab-561f-42b1-8893-9e05fb78f656 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Received unexpected event network-vif-plugged-2850c5e6-0790-4289-aab5-45fca743e84f for instance with vm_state building and task_state spawning. [ 1050.671696] env[69994]: DEBUG nova.network.neutron [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Successfully updated port: 2850c5e6-0790-4289-aab5-45fca743e84f {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1050.823792] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926051, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.871641] env[69994]: DEBUG nova.network.neutron [req-0a1e585b-ca86-4a1a-9752-6d9c5b8ea4fb req-e060c5ac-98ff-4b83-addf-96ff471ca438 service nova] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Updated VIF entry in instance network info cache for port ab89b870-7c43-45dd-878e-c1f922fc3ee4. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1050.872048] env[69994]: DEBUG nova.network.neutron [req-0a1e585b-ca86-4a1a-9752-6d9c5b8ea4fb req-e060c5ac-98ff-4b83-addf-96ff471ca438 service nova] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Updating instance_info_cache with network_info: [{"id": "ab89b870-7c43-45dd-878e-c1f922fc3ee4", "address": "fa:16:3e:0f:05:de", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab89b870-7c", "ovs_interfaceid": "ab89b870-7c43-45dd-878e-c1f922fc3ee4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.042136] env[69994]: DEBUG nova.scheduler.client.report [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1051.113302] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926052, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.153688] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926053, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.175797] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "refresh_cache-63e1c67b-6a79-4c09-a835-4ff11e15e981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.176118] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "refresh_cache-63e1c67b-6a79-4c09-a835-4ff11e15e981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.176337] env[69994]: DEBUG nova.network.neutron [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1051.240181] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "76dbf172-10b2-4439-9d2a-8226ba46062d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.240521] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "76dbf172-10b2-4439-9d2a-8226ba46062d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.240751] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "76dbf172-10b2-4439-9d2a-8226ba46062d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.240950] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "76dbf172-10b2-4439-9d2a-8226ba46062d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.241286] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "76dbf172-10b2-4439-9d2a-8226ba46062d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.243876] env[69994]: INFO nova.compute.manager [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Terminating instance [ 1051.321632] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926051, 'name': PowerOnVM_Task, 'duration_secs': 0.570357} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.321930] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1051.322248] env[69994]: INFO nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Took 10.37 seconds to spawn the instance on the hypervisor. [ 1051.322514] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1051.323353] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90eafa5c-15da-4eb7-b6af-19711bf9b0bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.374547] env[69994]: DEBUG oslo_concurrency.lockutils [req-0a1e585b-ca86-4a1a-9752-6d9c5b8ea4fb req-e060c5ac-98ff-4b83-addf-96ff471ca438 service nova] Releasing lock "refresh_cache-9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.549119] env[69994]: DEBUG oslo_concurrency.lockutils [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.922s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.551663] env[69994]: DEBUG oslo_concurrency.lockutils [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.535s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.551859] env[69994]: DEBUG nova.objects.instance [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Trying to apply a 
migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1051.573424] env[69994]: INFO nova.scheduler.client.report [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Deleted allocations for instance 5acdf02b-f61c-46ff-9c36-8e86b9be7738 [ 1051.612298] env[69994]: DEBUG oslo_vmware.api [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926052, 'name': PowerOnVM_Task, 'duration_secs': 0.756053} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.612588] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1051.612852] env[69994]: INFO nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Took 8.22 seconds to spawn the instance on the hypervisor. [ 1051.613066] env[69994]: DEBUG nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1051.613862] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f5ff68-f122-40a8-922c-ea9362e0d7fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.651802] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926053, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597224} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.652086] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05/9dbaceb8-fa4d-40c4-9f0e-fa9749663a05.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1051.652310] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1051.652782] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a812d3d-5247-4654-9414-5b3281f934a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.659923] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1051.659923] env[69994]: value = "task-2926054" [ 1051.659923] env[69994]: _type = "Task" [ 1051.659923] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.668788] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926054, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.714276] env[69994]: DEBUG nova.network.neutron [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1051.748857] env[69994]: DEBUG nova.compute.manager [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1051.749458] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1051.750743] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259057b1-fe20-4284-b5bb-b6829a00f733 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.761214] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.762647] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9b43098-93a3-477a-863c-647e763ae286 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.768764] env[69994]: DEBUG oslo_vmware.api [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 1051.768764] env[69994]: value = "task-2926055" [ 1051.768764] env[69994]: _type = "Task" [ 1051.768764] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.777983] env[69994]: DEBUG oslo_vmware.api [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2926055, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.841033] env[69994]: INFO nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Took 18.93 seconds to build instance. 
[ 1051.859461] env[69994]: DEBUG nova.network.neutron [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Updating instance_info_cache with network_info: [{"id": "2850c5e6-0790-4289-aab5-45fca743e84f", "address": "fa:16:3e:14:e2:4a", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2850c5e6-07", "ovs_interfaceid": "2850c5e6-0790-4289-aab5-45fca743e84f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.081070] env[69994]: DEBUG oslo_concurrency.lockutils [None req-80eaebe6-902a-44b3-88d6-fd0940ad437d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "5acdf02b-f61c-46ff-9c36-8e86b9be7738" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.016s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.131931] env[69994]: INFO nova.compute.manager [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Took 19.19 seconds to build instance. [ 1052.170428] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926054, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066122} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.170729] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1052.171626] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed5f0a2-95a7-4ff1-b677-bdc3eea0acef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.195256] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05/9dbaceb8-fa4d-40c4-9f0e-fa9749663a05.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1052.195965] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-466e2da2-0232-4737-80bd-bada97266168 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.215858] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1052.215858] env[69994]: value = "task-2926056" [ 1052.215858] env[69994]: _type = "Task" [ 1052.215858] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.225046] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926056, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.281426] env[69994]: DEBUG oslo_vmware.api [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2926055, 'name': PowerOffVM_Task, 'duration_secs': 0.321961} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.281746] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.281969] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1052.282264] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a119bdf-4288-4872-82c0-28d2a742c206 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.342478] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "6c81eb8b-78d7-469d-8076-13d8a8f61fec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.442s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.360083] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1052.360083] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1052.360354] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Deleting the datastore file [datastore2] 76dbf172-10b2-4439-9d2a-8226ba46062d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1052.360679] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9415ccb-5aa4-464b-85e1-a3640352e688 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.363594] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "refresh_cache-63e1c67b-6a79-4c09-a835-4ff11e15e981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.363931] env[69994]: DEBUG nova.compute.manager [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 
tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Instance network_info: |[{"id": "2850c5e6-0790-4289-aab5-45fca743e84f", "address": "fa:16:3e:14:e2:4a", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2850c5e6-07", "ovs_interfaceid": "2850c5e6-0790-4289-aab5-45fca743e84f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1052.364324] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:e2:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2850c5e6-0790-4289-aab5-45fca743e84f', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1052.372175] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1052.373063] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1052.373581] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83de3300-7c39-4cf5-9ef3-ae47a447bba8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.391032] env[69994]: DEBUG oslo_vmware.api [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for the task: (returnval){ [ 1052.391032] env[69994]: value = "task-2926058" [ 1052.391032] env[69994]: _type = "Task" [ 1052.391032] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.395640] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1052.395640] env[69994]: value = "task-2926059" [ 1052.395640] env[69994]: _type = "Task" [ 1052.395640] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.402637] env[69994]: DEBUG oslo_vmware.api [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2926058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.408064] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926059, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.560433] env[69994]: DEBUG oslo_concurrency.lockutils [None req-602ad704-6331-4328-b2ab-8e3b0c869b31 tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.561824] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.154s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.562216] env[69994]: DEBUG nova.objects.instance [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lazy-loading 'resources' on Instance uuid d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.619547] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "4d415c4d-54b2-4324-8e98-9dc476960348" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.619839] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "4d415c4d-54b2-4324-8e98-9dc476960348" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.620091] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "4d415c4d-54b2-4324-8e98-9dc476960348-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.620322] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "4d415c4d-54b2-4324-8e98-9dc476960348-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.620548] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "4d415c4d-54b2-4324-8e98-9dc476960348-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.622704] env[69994]: INFO nova.compute.manager [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Terminating instance [ 1052.634083] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163c086a-f38c-462f-9dcc-52b15632a141 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "922799c0-707c-4f4e-a54c-f015eab0a8d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.717s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.689387] env[69994]: DEBUG nova.compute.manager [req-5da71a86-85f6-49d6-8ac0-fc7eb2ffeed3 req-fabc9f82-f34a-4c57-aac6-b993534b5115 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Received event network-changed-2850c5e6-0790-4289-aab5-45fca743e84f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1052.689697] env[69994]: DEBUG nova.compute.manager [req-5da71a86-85f6-49d6-8ac0-fc7eb2ffeed3 req-fabc9f82-f34a-4c57-aac6-b993534b5115 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Refreshing instance network info cache due to event network-changed-2850c5e6-0790-4289-aab5-45fca743e84f. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1052.689843] env[69994]: DEBUG oslo_concurrency.lockutils [req-5da71a86-85f6-49d6-8ac0-fc7eb2ffeed3 req-fabc9f82-f34a-4c57-aac6-b993534b5115 service nova] Acquiring lock "refresh_cache-63e1c67b-6a79-4c09-a835-4ff11e15e981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.689987] env[69994]: DEBUG oslo_concurrency.lockutils [req-5da71a86-85f6-49d6-8ac0-fc7eb2ffeed3 req-fabc9f82-f34a-4c57-aac6-b993534b5115 service nova] Acquired lock "refresh_cache-63e1c67b-6a79-4c09-a835-4ff11e15e981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.690277] env[69994]: DEBUG nova.network.neutron [req-5da71a86-85f6-49d6-8ac0-fc7eb2ffeed3 req-fabc9f82-f34a-4c57-aac6-b993534b5115 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Refreshing network info cache for port 2850c5e6-0790-4289-aab5-45fca743e84f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1052.726644] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926056, 'name': ReconfigVM_Task, 'duration_secs': 0.48209} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.726940] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05/9dbaceb8-fa4d-40c4-9f0e-fa9749663a05.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1052.727678] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f76f7c56-3aa6-47c8-9b40-9e7a6b17f520 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.734717] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1052.734717] env[69994]: value = "task-2926060" [ 1052.734717] env[69994]: _type = "Task" [ 1052.734717] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.744232] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926060, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.842729] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "ab320e59-febb-4f8f-9bc4-74227d29c752" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.843075] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.905187] env[69994]: DEBUG oslo_vmware.api [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Task: {'id': task-2926058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210309} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.908237] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1052.908446] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1052.908635] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1052.908802] env[69994]: INFO nova.compute.manager [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1052.909045] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1052.909212] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926059, 'name': CreateVM_Task, 'duration_secs': 0.459291} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.909401] env[69994]: DEBUG nova.compute.manager [-] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1052.909494] env[69994]: DEBUG nova.network.neutron [-] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1052.911228] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1052.912019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.912240] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.912670] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1052.913286] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d989068-c848-4ee4-ad71-795a930328a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.918471] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1052.918471] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525282c3-bae1-562a-d0ef-0cdbf1b873f3" [ 1052.918471] env[69994]: _type = "Task" [ 1052.918471] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.927181] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525282c3-bae1-562a-d0ef-0cdbf1b873f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.126541] env[69994]: DEBUG nova.compute.manager [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1053.127080] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1053.127637] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61b02a9-3b94-4503-905a-ee7bffaac11f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.140533] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1053.141208] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-636bd80c-7143-4f58-bd50-318c593d0724 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.154563] env[69994]: DEBUG oslo_vmware.api [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1053.154563] env[69994]: value = "task-2926061" [ 1053.154563] env[69994]: _type = "Task" [ 1053.154563] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.168601] env[69994]: DEBUG oslo_vmware.api [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926061, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.171034] env[69994]: DEBUG nova.compute.manager [req-7e4abd7f-39d8-4723-9901-489407f5d4a9 req-d7c5e9d7-d4a0-4c31-af5b-75af96c4f048 service nova] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Received event network-vif-deleted-a2315274-4441-4952-9041-19b79c4a331a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1053.171312] env[69994]: INFO nova.compute.manager [req-7e4abd7f-39d8-4723-9901-489407f5d4a9 req-d7c5e9d7-d4a0-4c31-af5b-75af96c4f048 service nova] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Neutron deleted interface a2315274-4441-4952-9041-19b79c4a331a; detaching it from the instance and deleting it from the info cache [ 1053.171514] env[69994]: DEBUG nova.network.neutron [req-7e4abd7f-39d8-4723-9901-489407f5d4a9 req-d7c5e9d7-d4a0-4c31-af5b-75af96c4f048 service nova] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.248734] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926060, 'name': Rename_Task, 'duration_secs': 0.254877} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.248734] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1053.248920] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5f22df4-dc1f-4989-9457-09079384cf8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.259086] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1053.259086] env[69994]: value = "task-2926062" [ 1053.259086] env[69994]: _type = "Task" [ 1053.259086] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.268298] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926062, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.346714] env[69994]: INFO nova.compute.manager [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Detaching volume 36d050ad-1ac5-4518-a08c-d07445bd0225 [ 1053.348099] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a67a40c-2c59-486a-a4e7-66b5411b8e3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.358260] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e318c6-9089-455d-9abe-20f011366abc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.391931] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2091b5ff-ee99-4b15-9b6e-9de765386680 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.399648] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d010a9-693a-4d22-b75a-a88fd49f8552 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.406785] env[69994]: INFO nova.virt.block_device [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Attempting to driver detach volume 36d050ad-1ac5-4518-a08c-d07445bd0225 from mountpoint /dev/sdb [ 1053.407037] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 
ab320e59-febb-4f8f-9bc4-74227d29c752] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1053.407232] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587479', 'volume_id': '36d050ad-1ac5-4518-a08c-d07445bd0225', 'name': 'volume-36d050ad-1ac5-4518-a08c-d07445bd0225', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ab320e59-febb-4f8f-9bc4-74227d29c752', 'attached_at': '', 'detached_at': '', 'volume_id': '36d050ad-1ac5-4518-a08c-d07445bd0225', 'serial': '36d050ad-1ac5-4518-a08c-d07445bd0225'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1053.408054] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260866ea-6a32-41f5-8a14-df4792c6a3c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.422598] env[69994]: DEBUG nova.compute.provider_tree [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1053.446324] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c18a204-060d-4c85-929b-5aee4b0efece {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.450730] env[69994]: DEBUG nova.network.neutron [req-5da71a86-85f6-49d6-8ac0-fc7eb2ffeed3 req-fabc9f82-f34a-4c57-aac6-b993534b5115 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Updated VIF entry in instance network info cache for port 2850c5e6-0790-4289-aab5-45fca743e84f. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1053.451130] env[69994]: DEBUG nova.network.neutron [req-5da71a86-85f6-49d6-8ac0-fc7eb2ffeed3 req-fabc9f82-f34a-4c57-aac6-b993534b5115 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Updating instance_info_cache with network_info: [{"id": "2850c5e6-0790-4289-aab5-45fca743e84f", "address": "fa:16:3e:14:e2:4a", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2850c5e6-07", "ovs_interfaceid": "2850c5e6-0790-4289-aab5-45fca743e84f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.458796] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525282c3-bae1-562a-d0ef-0cdbf1b873f3, 'name': SearchDatastore_Task, 'duration_secs': 0.016745} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.460820] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.461056] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1053.461305] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.461466] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.461658] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1053.462570] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b25b843-a6ec-495d-8260-d3c5847e3fb7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.465027] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ef5a5a-5c59-4b7e-8562-5b1bd7c7a42e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.468440] env[69994]: ERROR nova.scheduler.client.report [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [req-ef88ef47-26dc-41ae-9a0b-f6ee8ad1f627] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ef88ef47-26dc-41ae-9a0b-f6ee8ad1f627"}]} [ 1053.493249] env[69994]: DEBUG nova.scheduler.client.report [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1053.494807] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc00e94-456c-466c-828a-e4e582f0b678 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.497527] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1053.497713] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1053.499375] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf2155da-ec49-4e5a-bf13-d0a5cdd8a1c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.504850] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1053.504850] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522fb4f1-5c9c-9ee1-91be-5233aae648fb" [ 1053.504850] env[69994]: _type = "Task" [ 1053.504850] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.517515] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] The volume has not been displaced from its original location: [datastore1] volume-36d050ad-1ac5-4518-a08c-d07445bd0225/volume-36d050ad-1ac5-4518-a08c-d07445bd0225.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1053.523436] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Reconfiguring VM instance instance-00000020 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1053.524807] env[69994]: DEBUG nova.scheduler.client.report [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1053.525051] env[69994]: DEBUG nova.compute.provider_tree [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1053.530549] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fe207fa-5274-4226-8a8e-47c300def8ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.552219] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522fb4f1-5c9c-9ee1-91be-5233aae648fb, 'name': SearchDatastore_Task, 'duration_secs': 0.011111} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.554393] env[69994]: DEBUG oslo_vmware.api [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1053.554393] env[69994]: value = "task-2926063" [ 1053.554393] env[69994]: _type = "Task" [ 1053.554393] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.555073] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-449a012a-c6d9-4ad6-9b5b-6cac5e51d7b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.558050] env[69994]: DEBUG nova.scheduler.client.report [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1053.569215] env[69994]: DEBUG oslo_vmware.api [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926063, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.570661] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1053.570661] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5267d610-e4b5-22de-8f2c-7ab7ff66df69" [ 1053.570661] env[69994]: _type = "Task" [ 1053.570661] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.579885] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5267d610-e4b5-22de-8f2c-7ab7ff66df69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.588941] env[69994]: DEBUG nova.scheduler.client.report [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1053.654741] env[69994]: DEBUG nova.network.neutron [-] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.670357] env[69994]: DEBUG oslo_vmware.api [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926061, 'name': PowerOffVM_Task, 'duration_secs': 0.273614} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.671862] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1053.671862] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1053.672159] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7533a58-f6a2-4d27-882e-c0e63b597012 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.679230] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fc9598b-6df4-4043-9686-fccdae9ac4b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.690468] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70bd82c2-6c3d-45d3-9da0-6833fcd78081 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.728872] env[69994]: DEBUG nova.compute.manager [req-7e4abd7f-39d8-4723-9901-489407f5d4a9 req-d7c5e9d7-d4a0-4c31-af5b-75af96c4f048 service nova] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Detach interface failed, port_id=a2315274-4441-4952-9041-19b79c4a331a, reason: Instance 76dbf172-10b2-4439-9d2a-8226ba46062d could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1053.749403] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1053.750865] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1053.750865] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Deleting the datastore file [datastore1] 4d415c4d-54b2-4324-8e98-9dc476960348 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1053.750865] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b6854d2-851e-4d59-bd2b-408da55f92e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.758157] env[69994]: DEBUG oslo_vmware.api [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1053.758157] env[69994]: value = "task-2926065" [ 1053.758157] env[69994]: _type = "Task" [ 1053.758157] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.774309] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926062, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.780555] env[69994]: DEBUG oslo_vmware.api [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926065, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.918962] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0da916a-eeb1-4700-aae6-a168800d5c28 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.927701] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3972ac-0056-4ec6-8bb4-11fb29e5a731 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.961944] env[69994]: DEBUG oslo_concurrency.lockutils [req-5da71a86-85f6-49d6-8ac0-fc7eb2ffeed3 req-fabc9f82-f34a-4c57-aac6-b993534b5115 service nova] Releasing lock "refresh_cache-63e1c67b-6a79-4c09-a835-4ff11e15e981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.963805] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813c2e13-9163-4eb0-b5d6-93328a319a24 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.972704] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70035aa6-df2c-4bf6-8747-d83f6be8692c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.988604] env[69994]: DEBUG nova.compute.provider_tree [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1054.067466] env[69994]: DEBUG oslo_vmware.api [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926063, 'name': ReconfigVM_Task, 'duration_secs': 0.325297} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.067786] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Reconfigured VM instance instance-00000020 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1054.072741] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4483c296-fdf8-48ce-98f7-7eb6fc12764e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.092738] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5267d610-e4b5-22de-8f2c-7ab7ff66df69, 'name': SearchDatastore_Task, 'duration_secs': 0.010912} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.094304] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.094794] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 63e1c67b-6a79-4c09-a835-4ff11e15e981/63e1c67b-6a79-4c09-a835-4ff11e15e981.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1054.094958] env[69994]: DEBUG oslo_vmware.api [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1054.094958] env[69994]: value = "task-2926066" [ 1054.094958] env[69994]: _type = "Task" [ 1054.094958] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.095182] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f82cf089-0598-4cb3-93eb-17f38957547d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.105684] env[69994]: DEBUG oslo_vmware.api [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926066, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.107356] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1054.107356] env[69994]: value = "task-2926067" [ 1054.107356] env[69994]: _type = "Task" [ 1054.107356] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.116141] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926067, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.157974] env[69994]: INFO nova.compute.manager [-] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Took 1.25 seconds to deallocate network for instance. [ 1054.275202] env[69994]: DEBUG oslo_vmware.api [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926062, 'name': PowerOnVM_Task, 'duration_secs': 0.590061} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.278491] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1054.278729] env[69994]: INFO nova.compute.manager [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Took 6.48 seconds to spawn the instance on the hypervisor. [ 1054.278966] env[69994]: DEBUG nova.compute.manager [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.279315] env[69994]: DEBUG oslo_vmware.api [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926065, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.286422} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.280140] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad03fdc6-f26b-43f1-a718-e054d4b73229 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.282874] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1054.283107] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1054.283312] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1054.283503] env[69994]: INFO nova.compute.manager [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1054.283776] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1054.284154] env[69994]: DEBUG nova.compute.manager [-] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1054.284154] env[69994]: DEBUG nova.network.neutron [-] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1054.568756] env[69994]: DEBUG nova.scheduler.client.report [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 129 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1054.568756] env[69994]: DEBUG nova.compute.provider_tree [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 129 to 130 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1054.569029] env[69994]: DEBUG nova.compute.provider_tree [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1054.612517] env[69994]: DEBUG oslo_vmware.api [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926066, 'name': ReconfigVM_Task, 'duration_secs': 0.168958} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.616690] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587479', 'volume_id': '36d050ad-1ac5-4518-a08c-d07445bd0225', 'name': 'volume-36d050ad-1ac5-4518-a08c-d07445bd0225', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ab320e59-febb-4f8f-9bc4-74227d29c752', 'attached_at': '', 'detached_at': '', 'volume_id': '36d050ad-1ac5-4518-a08c-d07445bd0225', 'serial': '36d050ad-1ac5-4518-a08c-d07445bd0225'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1054.625515] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926067, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.665959] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.804221] env[69994]: INFO nova.compute.manager [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Took 18.36 seconds to build instance. 
[ 1055.075795] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.514s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.078140] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.059s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.078419] env[69994]: DEBUG nova.objects.instance [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Lazy-loading 'resources' on Instance uuid 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.101658] env[69994]: INFO nova.scheduler.client.report [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted allocations for instance d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e [ 1055.122452] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926067, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557664} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.122717] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 63e1c67b-6a79-4c09-a835-4ff11e15e981/63e1c67b-6a79-4c09-a835-4ff11e15e981.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1055.122944] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1055.123228] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7aacafdd-bade-4623-9873-aa90c5fbc5a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.130014] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1055.130014] env[69994]: value = "task-2926068" [ 1055.130014] env[69994]: _type = "Task" [ 1055.130014] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.139200] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926068, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.165644] env[69994]: DEBUG nova.objects.instance [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lazy-loading 'flavor' on Instance uuid ab320e59-febb-4f8f-9bc4-74227d29c752 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.200014] env[69994]: DEBUG nova.compute.manager [req-369944b8-43dc-437a-b30f-9872a71a79b4 req-2d85e2b8-34de-4809-9ed8-d88dabf72629 service nova] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Received event network-vif-deleted-bc1a6a8e-cea0-48bf-96b9-150002406dfc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1055.200276] env[69994]: INFO nova.compute.manager [req-369944b8-43dc-437a-b30f-9872a71a79b4 req-2d85e2b8-34de-4809-9ed8-d88dabf72629 service nova] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Neutron deleted interface bc1a6a8e-cea0-48bf-96b9-150002406dfc; detaching it from the instance and deleting it from the info cache [ 1055.200514] env[69994]: DEBUG nova.network.neutron [req-369944b8-43dc-437a-b30f-9872a71a79b4 req-2d85e2b8-34de-4809-9ed8-d88dabf72629 service nova] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.236551] env[69994]: DEBUG nova.network.neutron [-] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.306508] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dab559d-e4e7-48f4-aacc-13845d8883d0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.874s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.553239] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f35d6e28-86c5-4cd0-94e9-0165ca571292 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.553781] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f35d6e28-86c5-4cd0-94e9-0165ca571292 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.553957] env[69994]: DEBUG nova.compute.manager 
[None req-f35d6e28-86c5-4cd0-94e9-0165ca571292 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1055.554816] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b97f81c-c31d-4133-8124-5aae91e52376 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.564031] env[69994]: DEBUG nova.compute.manager [None req-f35d6e28-86c5-4cd0-94e9-0165ca571292 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1055.564838] env[69994]: DEBUG nova.objects.instance [None req-f35d6e28-86c5-4cd0-94e9-0165ca571292 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lazy-loading 'flavor' on Instance uuid 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.609877] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aae31be2-1446-43a6-9840-eba609485630 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.669s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.642094] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926068, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.407789} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.642386] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1055.643266] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665fc933-5c5f-4261-aeb5-f899d2e294cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.675084] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 63e1c67b-6a79-4c09-a835-4ff11e15e981/63e1c67b-6a79-4c09-a835-4ff11e15e981.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1055.680817] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1fec152-1b8d-408b-a9e7-263f61ac5396 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.702893] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1055.702893] env[69994]: value = "task-2926069" [ 1055.702893] env[69994]: _type = "Task" [ 1055.702893] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.703219] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3366ad2-933b-4944-9cd9-f4ff3cb76eba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.720413] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926069, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.724871] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df58dbd7-4c18-4fec-9061-462cff0c205a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.738698] env[69994]: INFO nova.compute.manager [-] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Took 1.45 seconds to deallocate network for instance. [ 1055.763990] env[69994]: DEBUG nova.compute.manager [req-369944b8-43dc-437a-b30f-9872a71a79b4 req-2d85e2b8-34de-4809-9ed8-d88dabf72629 service nova] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Detach interface failed, port_id=bc1a6a8e-cea0-48bf-96b9-150002406dfc, reason: Instance 4d415c4d-54b2-4324-8e98-9dc476960348 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1055.895133] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a5fd2e-df08-40e4-8fe5-10a72838ecd7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.903499] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6648c44-d041-4f65-9031-4d40aa0b9110 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.937448] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1565b171-f23e-4339-ad47-448330b63750 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.945069] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6e3588-2a32-450a-9306-04a8b62b3d45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.958720] env[69994]: DEBUG nova.compute.provider_tree [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.005010] env[69994]: DEBUG oslo_concurrency.lockutils [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "5e142f6e-920a-4f11-abff-13eb5c168660" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.005368] env[69994]: DEBUG oslo_concurrency.lockutils [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "5e142f6e-920a-4f11-abff-13eb5c168660" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.005687] env[69994]: DEBUG oslo_concurrency.lockutils [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "5e142f6e-920a-4f11-abff-13eb5c168660-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.005845] env[69994]: DEBUG oslo_concurrency.lockutils [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "5e142f6e-920a-4f11-abff-13eb5c168660-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.006062] env[69994]: DEBUG oslo_concurrency.lockutils [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] 
Lock "5e142f6e-920a-4f11-abff-13eb5c168660-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.008425] env[69994]: INFO nova.compute.manager [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Terminating instance [ 1056.197256] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36b590d1-1580-44d8-b34a-4d2b7cbb33ce tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.354s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.217544] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926069, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.249039] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.378847] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3479f98-7c39-4950-afe2-bc98190ce5c6 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "interface-d5af7ae1-d68e-4170-b762-e56d7f2551d7-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.379253] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3479f98-7c39-4950-afe2-bc98190ce5c6 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-d5af7ae1-d68e-4170-b762-e56d7f2551d7-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.379704] env[69994]: DEBUG nova.objects.instance [None req-c3479f98-7c39-4950-afe2-bc98190ce5c6 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'flavor' on Instance uuid d5af7ae1-d68e-4170-b762-e56d7f2551d7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1056.461923] env[69994]: DEBUG nova.scheduler.client.report [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1056.512677] env[69994]: DEBUG nova.compute.manager [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1056.512877] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.513832] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d4c7bf-8e0b-492e-904e-7af49a2f7910 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.522383] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.522669] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c911c51e-e423-4adb-9372-91c74bdc76e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.528781] env[69994]: DEBUG oslo_vmware.api [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1056.528781] env[69994]: value = "task-2926070" [ 1056.528781] env[69994]: _type = "Task" [ 1056.528781] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.537648] env[69994]: DEBUG oslo_vmware.api [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926070, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.573078] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f35d6e28-86c5-4cd0-94e9-0165ca571292 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.573443] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06c7e2dc-268d-49c0-9c25-1b0bb3e72d0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.580828] env[69994]: DEBUG oslo_vmware.api [None req-f35d6e28-86c5-4cd0-94e9-0165ca571292 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1056.580828] env[69994]: value = "task-2926071" [ 1056.580828] env[69994]: _type = "Task" [ 1056.580828] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.589639] env[69994]: DEBUG oslo_vmware.api [None req-f35d6e28-86c5-4cd0-94e9-0165ca571292 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926071, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.717797] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926069, 'name': ReconfigVM_Task, 'duration_secs': 0.548825} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.718243] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 63e1c67b-6a79-4c09-a835-4ff11e15e981/63e1c67b-6a79-4c09-a835-4ff11e15e981.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1056.719043] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-060eea35-fa1f-42a9-8b37-e14afc595ff4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.725674] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1056.725674] env[69994]: value = "task-2926072" [ 1056.725674] env[69994]: _type = "Task" [ 1056.725674] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.738333] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926072, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.884176] env[69994]: DEBUG nova.objects.instance [None req-c3479f98-7c39-4950-afe2-bc98190ce5c6 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'pci_requests' on Instance uuid d5af7ae1-d68e-4170-b762-e56d7f2551d7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1056.966696] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.970227] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.986s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.970616] env[69994]: DEBUG nova.objects.instance [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Lazy-loading 'resources' on Instance uuid 686feb53-00e2-43d9-b316-09c089df0891 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1056.990085] env[69994]: INFO nova.scheduler.client.report [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 tempest-ServerPasswordTestJSON-1707718776-project-member] Deleted allocations for instance 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3 [ 1057.040894] env[69994]: DEBUG oslo_vmware.api [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926070, 'name': PowerOffVM_Task, 'duration_secs': 0.257762} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.041254] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1057.041428] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1057.041731] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4be6d64c-e0a1-47a4-8486-a4ba0d9b1085 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.090285] env[69994]: DEBUG oslo_vmware.api [None req-f35d6e28-86c5-4cd0-94e9-0165ca571292 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926071, 'name': PowerOffVM_Task, 'duration_secs': 0.209628} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.090625] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f35d6e28-86c5-4cd0-94e9-0165ca571292 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1057.090867] env[69994]: DEBUG nova.compute.manager [None req-f35d6e28-86c5-4cd0-94e9-0165ca571292 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1057.091732] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccd9579-ebae-4b46-bf51-eb45bc876600 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.117783] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.118186] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.118339] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleting the datastore file [datastore2] 5e142f6e-920a-4f11-abff-13eb5c168660 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.118513] env[69994]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b2d6f9f-5fc5-44b5-b204-77fa6c09e4f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.126198] env[69994]: DEBUG oslo_vmware.api [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1057.126198] env[69994]: value = "task-2926074" [ 1057.126198] env[69994]: _type = "Task" [ 1057.126198] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.135219] env[69994]: DEBUG oslo_vmware.api [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926074, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.236141] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926072, 'name': Rename_Task, 'duration_secs': 0.163259} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.236480] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1057.236718] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a25e83e-0d11-4573-9cdb-934902db4cba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.243266] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1057.243266] env[69994]: value = "task-2926075" [ 1057.243266] env[69994]: _type = "Task" [ 1057.243266] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.250759] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926075, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.382306] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "ab320e59-febb-4f8f-9bc4-74227d29c752" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.382615] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.382836] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "ab320e59-febb-4f8f-9bc4-74227d29c752-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.383033] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.383211] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.385423] env[69994]: INFO nova.compute.manager [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Terminating instance [ 1057.386980] env[69994]: DEBUG nova.objects.base [None req-c3479f98-7c39-4950-afe2-bc98190ce5c6 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1057.387192] env[69994]: DEBUG nova.network.neutron [None req-c3479f98-7c39-4950-afe2-bc98190ce5c6 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1057.497555] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7029893f-1e92-4970-af0b-ff6fb494bf66 tempest-ServerPasswordTestJSON-1707718776 
tempest-ServerPasswordTestJSON-1707718776-project-member] Lock "93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.906s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.500064] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3479f98-7c39-4950-afe2-bc98190ce5c6 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-d5af7ae1-d68e-4170-b762-e56d7f2551d7-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.121s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.605088] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f35d6e28-86c5-4cd0-94e9-0165ca571292 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.051s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.641278] env[69994]: DEBUG oslo_vmware.api [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926074, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.395085} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.641571] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.642753] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.642753] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.642753] env[69994]: INFO nova.compute.manager [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1057.642753] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1057.642753] env[69994]: DEBUG nova.compute.manager [-] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1057.642753] env[69994]: DEBUG nova.network.neutron [-] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1057.734701] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67c2336-4ff5-4940-906c-854b3a6a2c88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.742877] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1af2df-b64a-4b53-b721-1a9a939544d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.755907] env[69994]: DEBUG oslo_vmware.api [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926075, 'name': PowerOnVM_Task, 'duration_secs': 0.484625} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.779311] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1057.779574] env[69994]: INFO nova.compute.manager [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Took 7.55 seconds to spawn the instance on the hypervisor. 
[ 1057.779757] env[69994]: DEBUG nova.compute.manager [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1057.781413] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99438fd8-1d77-4d9e-9f0c-7ab28044cc0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.784548] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73345358-8f29-4731-9e39-1f58ca9e147b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.796290] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4daa15a7-5875-4fd8-b726-c0e50a914ae4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.809719] env[69994]: DEBUG nova.compute.provider_tree [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1057.891696] env[69994]: DEBUG nova.compute.manager [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1057.891952] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1057.892888] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f8c5dc-85cd-457c-861f-98997e245ed4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.900337] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1057.900614] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83c7201b-3f9a-4196-8615-65747e8c7e61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.906699] env[69994]: DEBUG oslo_vmware.api [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1057.906699] env[69994]: value = "task-2926076" [ 1057.906699] env[69994]: _type = "Task" [ 1057.906699] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.914481] env[69994]: DEBUG oslo_vmware.api [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926076, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.061919] env[69994]: DEBUG nova.compute.manager [req-f856b2fe-094c-4c41-8a8b-5cc45a689ffb req-b27837b4-43ed-4ec5-a7aa-14f62d7fe76a service nova] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Received event network-vif-deleted-961a1fd7-bcab-47f6-a2b7-6dd5fa005a30 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.062151] env[69994]: INFO nova.compute.manager [req-f856b2fe-094c-4c41-8a8b-5cc45a689ffb req-b27837b4-43ed-4ec5-a7aa-14f62d7fe76a service nova] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Neutron deleted interface 961a1fd7-bcab-47f6-a2b7-6dd5fa005a30; detaching it from the instance and deleting it from the info cache [ 1058.062328] env[69994]: DEBUG nova.network.neutron [req-f856b2fe-094c-4c41-8a8b-5cc45a689ffb req-b27837b4-43ed-4ec5-a7aa-14f62d7fe76a service nova] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.237753] env[69994]: INFO nova.compute.manager [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Rebuilding instance [ 1058.278064] env[69994]: DEBUG nova.compute.manager [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1058.279186] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2142ab-35cb-4de8-b505-cf71b1862c2a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.301441] env[69994]: INFO nova.compute.manager [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Took 21.56 seconds to build instance. 
[ 1058.313476] env[69994]: DEBUG nova.scheduler.client.report [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1058.420166] env[69994]: DEBUG oslo_vmware.api [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926076, 'name': PowerOffVM_Task, 'duration_secs': 0.446883} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.420166] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1058.421432] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1058.421432] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7870cc6c-569e-413e-8e26-ec2514508dd4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.467515] env[69994]: DEBUG nova.network.neutron [-] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.557208] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1058.557423] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1058.557606] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Deleting the datastore file [datastore1] ab320e59-febb-4f8f-9bc4-74227d29c752 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1058.557881] 
env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d8df2d0-b4fc-4d07-9ebb-9013eb1181d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.566329] env[69994]: DEBUG oslo_vmware.api [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1058.566329] env[69994]: value = "task-2926078" [ 1058.566329] env[69994]: _type = "Task" [ 1058.566329] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.566630] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-063f4b26-c81b-44db-85ba-202f3cc123eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.576758] env[69994]: DEBUG oslo_vmware.api [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926078, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.580577] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba26eea-3c47-4d83-b9a6-a54eccdd26fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.611899] env[69994]: DEBUG nova.compute.manager [req-f856b2fe-094c-4c41-8a8b-5cc45a689ffb req-b27837b4-43ed-4ec5-a7aa-14f62d7fe76a service nova] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Detach interface failed, port_id=961a1fd7-bcab-47f6-a2b7-6dd5fa005a30, reason: Instance 5e142f6e-920a-4f11-abff-13eb5c168660 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1058.803084] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08d254be-db47-417d-9851-ddc95fe10832 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "63e1c67b-6a79-4c09-a835-4ff11e15e981" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.065s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.820395] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.850s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.823674] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.124s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.824182] env[69994]: DEBUG nova.objects.instance [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Lazy-loading 'resources' on Instance uuid e1c00159-d198-4858-b5a3-aa05152b1fda {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1058.849582] env[69994]: INFO nova.scheduler.client.report [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Deleted allocations for instance 686feb53-00e2-43d9-b316-09c089df0891 [ 1058.974514] env[69994]: INFO nova.compute.manager [-] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Took 1.33 seconds to deallocate network for instance. [ 1059.083717] env[69994]: DEBUG oslo_vmware.api [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926078, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.28809} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.084012] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.084207] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1059.084390] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1059.084569] env[69994]: INFO nova.compute.manager [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1059.084843] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.085045] env[69994]: DEBUG nova.compute.manager [-] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1059.085149] env[69994]: DEBUG nova.network.neutron [-] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1059.295607] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1059.295607] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d8aeca6-f66b-49c9-8aa1-6534216bc90e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.304480] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1059.304480] env[69994]: value = "task-2926079" [ 1059.304480] env[69994]: _type = "Task" [ 1059.304480] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.317394] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1059.317669] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1059.318523] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a477d68-1293-4434-8fcf-951e6dd8f0e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.329140] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1059.329140] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f892662-0ee8-4adc-a00e-d7f3306de4cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.363377] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5ae3368e-d6f5-4923-a216-ae9400207d0b tempest-ServersListShow298Test-2120015683 tempest-ServersListShow298Test-2120015683-project-member] Lock "686feb53-00e2-43d9-b316-09c089df0891" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.609s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.404787] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1059.405041] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1059.405231] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleting the datastore file [datastore2] 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1059.405501] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0470d7bd-1baf-4e6a-a274-9c4f3b542b8c {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.413571] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1059.413571] env[69994]: value = "task-2926081" [ 1059.413571] env[69994]: _type = "Task" [ 1059.413571] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.425052] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926081, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.451509] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "63e1c67b-6a79-4c09-a835-4ff11e15e981" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.451764] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "63e1c67b-6a79-4c09-a835-4ff11e15e981" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.451954] env[69994]: INFO nova.compute.manager [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Shelving [ 1059.482421] env[69994]: DEBUG oslo_concurrency.lockutils [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.509342] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "interface-d5af7ae1-d68e-4170-b762-e56d7f2551d7-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.509751] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-d5af7ae1-d68e-4170-b762-e56d7f2551d7-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.509995] env[69994]: DEBUG nova.objects.instance [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce 
tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'flavor' on Instance uuid d5af7ae1-d68e-4170-b762-e56d7f2551d7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1059.674186] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1be61ba-c9c8-4b41-a178-1ed9772835f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.686380] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2eb86e-fc2d-4774-8a36-a43e44b26896 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.688865] env[69994]: DEBUG oslo_vmware.rw_handles [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ef95a4-c7d1-b1b5-8b6d-bb9c2f01a4dc/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1059.689750] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d69ff1-84b2-4f27-8bfd-b0f38fbf4be4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.695859] env[69994]: DEBUG oslo_vmware.rw_handles [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ef95a4-c7d1-b1b5-8b6d-bb9c2f01a4dc/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1059.696507] env[69994]: ERROR oslo_vmware.rw_handles [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ef95a4-c7d1-b1b5-8b6d-bb9c2f01a4dc/disk-0.vmdk due to incomplete transfer. [ 1059.722511] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-afbdc0cb-67c8-4bfe-a6e3-e38ca53c7267 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.725373] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372bd769-87f6-4d09-b3c0-9f57e01a1a60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.733040] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1e3f10-98b7-47b7-ba40-53a5649cedb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.737528] env[69994]: DEBUG oslo_vmware.rw_handles [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ef95a4-c7d1-b1b5-8b6d-bb9c2f01a4dc/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1059.737654] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Uploaded image 4ac9f11f-818a-47f3-bec5-a37336d3bae8 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1059.740623] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1059.741272] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9867b86d-1ff5-4f70-b227-1b67e658403f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.751709] env[69994]: DEBUG nova.compute.provider_tree [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1059.758400] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1059.758400] env[69994]: value = "task-2926082" [ 1059.758400] env[69994]: _type = "Task" [ 1059.758400] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.766953] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926082, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.923459] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926081, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178373} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.923860] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.924092] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1059.924291] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1060.112651] env[69994]: DEBUG nova.objects.instance [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'pci_requests' on Instance uuid d5af7ae1-d68e-4170-b762-e56d7f2551d7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.255301] env[69994]: DEBUG nova.compute.manager [req-156d9fd0-7caf-4110-a3b4-30a7a6cc7373 req-7df62650-1573-44c1-97b3-9db89284eda9 service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Received event network-vif-deleted-da926370-b1f8-440c-a006-0135408e8d6f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1060.255516] env[69994]: INFO nova.compute.manager [req-156d9fd0-7caf-4110-a3b4-30a7a6cc7373 req-7df62650-1573-44c1-97b3-9db89284eda9 service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Neutron deleted interface da926370-b1f8-440c-a006-0135408e8d6f; detaching it from the instance and deleting it from the info cache [ 1060.255691] env[69994]: DEBUG nova.network.neutron [req-156d9fd0-7caf-4110-a3b4-30a7a6cc7373 req-7df62650-1573-44c1-97b3-9db89284eda9 service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.260020] env[69994]: DEBUG nova.scheduler.client.report [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1060.274269] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926082, 
'name': Destroy_Task, 'duration_secs': 0.368416} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.275100] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Destroyed the VM [ 1060.275354] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1060.277850] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-12a81ec1-2ed5-4d71-9ec3-685389b329b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.284397] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1060.284397] env[69994]: value = "task-2926083" [ 1060.284397] env[69994]: _type = "Task" [ 1060.284397] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.292466] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926083, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.469705] env[69994]: DEBUG nova.network.neutron [-] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.472196] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1060.473121] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30b51fb2-8a6b-495b-894d-2e54e892f7e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.481415] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1060.481415] env[69994]: value = "task-2926084" [ 1060.481415] env[69994]: _type = "Task" [ 1060.481415] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.492128] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926084, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.615576] env[69994]: DEBUG nova.objects.base [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1060.615859] env[69994]: DEBUG nova.network.neutron [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1060.681336] env[69994]: DEBUG nova.policy [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9c7ff94bd744305a13df72dbf967c11', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66d57a69e0924b9abc2cc4e67fc8173c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1060.764590] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.941s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.766747] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d39710c5-9782-4f23-92ec-b9667287c6f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.768979] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 10.967s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.781290] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c4fc15-776b-4f8e-9f02-480497eaa702 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.795115] env[69994]: INFO nova.scheduler.client.report [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Deleted allocations for instance e1c00159-d198-4858-b5a3-aa05152b1fda [ 1060.809462] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926083, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.824605] env[69994]: DEBUG nova.compute.manager [req-156d9fd0-7caf-4110-a3b4-30a7a6cc7373 req-7df62650-1573-44c1-97b3-9db89284eda9 service nova] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Detach interface failed, port_id=da926370-b1f8-440c-a006-0135408e8d6f, reason: Instance ab320e59-febb-4f8f-9bc4-74227d29c752 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1060.968101] env[69994]: DEBUG nova.virt.hardware [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1060.968101] env[69994]: DEBUG nova.virt.hardware [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1060.968101] env[69994]: DEBUG nova.virt.hardware [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1060.968101] env[69994]: DEBUG nova.virt.hardware [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1060.968580] env[69994]: DEBUG nova.virt.hardware [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1060.968905] env[69994]: DEBUG nova.virt.hardware [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1060.969274] env[69994]: DEBUG nova.virt.hardware [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1060.969585] env[69994]: DEBUG nova.virt.hardware [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1060.969898] env[69994]: DEBUG nova.virt.hardware [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1060.970250] env[69994]: DEBUG nova.virt.hardware [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1060.970584] env[69994]: DEBUG nova.virt.hardware [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1060.971844] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6019c913-967e-44d4-ae6a-01d97e1bd958 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.976490] env[69994]: INFO nova.compute.manager [-] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Took 1.89 seconds to deallocate network for instance. [ 1060.994252] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca7f07f-c8ba-4871-86ae-4d5913fa7246 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.005514] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926084, 'name': PowerOffVM_Task, 'duration_secs': 0.207502} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.016245] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1061.016840] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:05:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab89b870-7c43-45dd-878e-c1f922fc3ee4', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1061.024894] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1061.025991] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a815d5f3-9e52-4602-8223-dbd05f600fe9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.028671] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1061.028940] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-861599f5-9d3d-413f-9ade-81ac1fb4dcc2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.063939] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ef2a88-19be-42db-b1fc-6ade9bd9c4e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.066891] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1061.066891] env[69994]: value = "task-2926085" [ 1061.066891] env[69994]: _type = "Task" [ 1061.066891] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.079880] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926085, 'name': CreateVM_Task} progress is 15%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.309009] env[69994]: DEBUG oslo_vmware.api [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926083, 'name': RemoveSnapshot_Task, 'duration_secs': 0.904457} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.309624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b165a38f-854f-479e-8d5e-89684aef18dc tempest-AttachInterfacesUnderV243Test-349869599 tempest-AttachInterfacesUnderV243Test-349869599-project-member] Lock "e1c00159-d198-4858-b5a3-aa05152b1fda" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.563s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.313354] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1061.313702] env[69994]: INFO nova.compute.manager [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Took 16.31 seconds to snapshot the instance on the hypervisor. [ 1061.370801] env[69994]: DEBUG nova.network.neutron [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Successfully created port: 980fbc36-1a58-4992-a66c-ec31e2a90b67 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1061.485017] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.533314] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6101bdc1-f1f1-4417-b01a-6dffcdc0b5c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.540961] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757416d7-50e1-4b55-92f9-46b9e667cd5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.580742] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1061.584693] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6117011c-c985-425d-adce-24749af8fec0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.588100] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7ad2c3-32e8-4df9-9d84-33f3bf130cc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1061.596546] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926085, 'name': CreateVM_Task, 'duration_secs': 0.339729} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.599775] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1061.600299] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1061.600299] env[69994]: value = "task-2926086" [ 1061.600299] env[69994]: _type = "Task" [ 1061.600299] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.601018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.601319] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.601725] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1061.603235] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185fd0ba-9703-4c16-90a4-c02969ece01d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.607623] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7a35433-6207-42ec-b6c9-a989dba664b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.627664] env[69994]: DEBUG nova.compute.provider_tree [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.635925] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1061.635925] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b1e538-65de-9cc9-91da-5a1d0795558f" [ 1061.635925] env[69994]: _type = "Task" [ 1061.635925] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.646145] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b1e538-65de-9cc9-91da-5a1d0795558f, 'name': SearchDatastore_Task, 'duration_secs': 0.012157} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.647132] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.647403] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1061.647659] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.647827] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.648037] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1061.648654] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7e042e7d-1f9c-4441-8702-09b4ca2bcf48 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.657322] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1061.657550] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1061.658470] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a44aeda2-966b-44f6-b092-a90e819f98bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.664950] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1061.664950] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528ff183-2d2c-89c0-d32d-cfdb7c3aaeff" [ 1061.664950] env[69994]: _type = "Task" [ 1061.664950] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.673702] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528ff183-2d2c-89c0-d32d-cfdb7c3aaeff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.863575] env[69994]: DEBUG nova.compute.manager [None req-67e67fe6-432a-4b10-b692-ee5378efe387 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Found 2 images (rotation: 2) {{(pid=69994) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1062.119594] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926086, 'name': CreateSnapshot_Task, 'duration_secs': 0.485618} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.120981] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1062.126847] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac77f123-3353-49a2-8eef-a182b948d469 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.138604] env[69994]: DEBUG nova.scheduler.client.report [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1062.182249] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528ff183-2d2c-89c0-d32d-cfdb7c3aaeff, 'name': SearchDatastore_Task, 'duration_secs': 0.012986} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.185876] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60adda61-51c3-4317-8415-0f3eb16206ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.191904] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1062.191904] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528925ca-47e8-7aaf-5b21-a2908cb92482" [ 1062.191904] env[69994]: _type = "Task" [ 1062.191904] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.200590] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528925ca-47e8-7aaf-5b21-a2908cb92482, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.632241] env[69994]: DEBUG nova.compute.manager [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1062.633493] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1d9a3a-29dd-4268-80f3-ad3ac807e981 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.659669] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1062.661959] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6f49014e-29c7-428d-8009-57fb90e88dcb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.673537] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1062.673537] env[69994]: value = "task-2926087" [ 1062.673537] env[69994]: _type = "Task" [ 1062.673537] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.684684] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926087, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.708655] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528925ca-47e8-7aaf-5b21-a2908cb92482, 'name': SearchDatastore_Task, 'duration_secs': 0.009418} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.709373] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.712041] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05/9dbaceb8-fa4d-40c4-9f0e-fa9749663a05.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1062.712041] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2747afbf-1501-4aca-b1e7-7e468a6de029 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.718309] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1062.718309] env[69994]: value = "task-2926088" [ 1062.718309] env[69994]: _type = "Task" [ 1062.718309] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.729667] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926088, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.150391] env[69994]: INFO nova.compute.manager [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] instance snapshotting [ 1063.151058] env[69994]: DEBUG nova.objects.instance [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'flavor' on Instance uuid f0b77732-aae1-4790-a2c7-75586e78eda6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1063.162520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.393s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.165681] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.500s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.169598] env[69994]: DEBUG nova.objects.instance [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lazy-loading 'resources' on Instance uuid 76dbf172-10b2-4439-9d2a-8226ba46062d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1063.187307] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926087, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.202312] env[69994]: DEBUG nova.compute.manager [req-dc2550b8-d16e-4092-9bc4-d6fb707a4578 req-81e0e769-4103-4784-a107-556def26d385 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Received event network-vif-plugged-980fbc36-1a58-4992-a66c-ec31e2a90b67 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1063.202312] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc2550b8-d16e-4092-9bc4-d6fb707a4578 req-81e0e769-4103-4784-a107-556def26d385 service nova] Acquiring lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.202312] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc2550b8-d16e-4092-9bc4-d6fb707a4578 req-81e0e769-4103-4784-a107-556def26d385 service nova] Lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.203151] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc2550b8-d16e-4092-9bc4-d6fb707a4578 req-81e0e769-4103-4784-a107-556def26d385 service nova] Lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.203527] env[69994]: DEBUG nova.compute.manager [req-dc2550b8-d16e-4092-9bc4-d6fb707a4578 req-81e0e769-4103-4784-a107-556def26d385 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] No waiting events found dispatching network-vif-plugged-980fbc36-1a58-4992-a66c-ec31e2a90b67 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1063.203849] env[69994]: WARNING nova.compute.manager [req-dc2550b8-d16e-4092-9bc4-d6fb707a4578 req-81e0e769-4103-4784-a107-556def26d385 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Received unexpected event network-vif-plugged-980fbc36-1a58-4992-a66c-ec31e2a90b67 for instance with vm_state active and task_state None. [ 1063.229478] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926088, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5095} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.229952] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05/9dbaceb8-fa4d-40c4-9f0e-fa9749663a05.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1063.230413] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1063.230849] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f237a62-2e4f-4ab0-8132-8e5922a63f4b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.240599] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1063.240599] env[69994]: value = "task-2926089" [ 1063.240599] env[69994]: _type = "Task" [ 1063.240599] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.256313] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926089, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.452853] env[69994]: DEBUG nova.network.neutron [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Successfully updated port: 980fbc36-1a58-4992-a66c-ec31e2a90b67 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1063.657487] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788d2eaa-fb32-465b-a033-5cbd4715bf7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.700026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c951fc0b-d44a-47a9-9b64-550ede031818 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.709599] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926087, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.755217] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926089, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072267} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.755882] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1063.759756] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f4d9c4-ad14-40f9-8d83-016376fae4ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.781506] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05/9dbaceb8-fa4d-40c4-9f0e-fa9749663a05.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1063.782901] env[69994]: INFO nova.scheduler.client.report [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted allocation for migration 19953643-7357-4d7b-9f22-c7785db7cea6 [ 1063.786838] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e876fec4-a234-468c-ac51-811cfd77a663 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.813312] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1063.813312] env[69994]: value = "task-2926090" [ 1063.813312] env[69994]: _type = "Task" [ 1063.813312] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.821782] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926090, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.959351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.959351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.959351] env[69994]: DEBUG nova.network.neutron [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1064.033720] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c4c6e3-b723-4576-9cf3-21af8f5f3d23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.043781] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a68adc-6b79-43a1-8c40-65b937bf6821 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.081526] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37846c31-f685-4de7-80df-b7e51840bdde {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.089754] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b6f209-63f7-4764-91dc-cdbb446b2980 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.105040] env[69994]: DEBUG nova.compute.provider_tree [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1064.203136] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926087, 'name': CloneVM_Task, 'duration_secs': 1.412759} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.203484] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Created linked-clone VM from snapshot [ 1064.204205] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d580bb-c5a3-4bdf-8e70-c6cbd5f7b767 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.215018] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Uploading image 9941d076-1089-4dc8-ad0f-666d4744ab9e {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1064.218811] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1064.220799] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a404891a-b7c4-4581-9cbf-9dcbd6ca204c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.227986] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1064.227986] env[69994]: value = "task-2926091" [ 1064.227986] env[69994]: _type = "Task" [ 1064.227986] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.236849] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926091, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.250352] env[69994]: DEBUG oslo_vmware.rw_handles [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1064.250352] env[69994]: value = "vm-587599" [ 1064.250352] env[69994]: _type = "VirtualMachine" [ 1064.250352] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1064.250352] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-12d45a64-1116-4863-97b4-6ab1abbf2696 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.255438] env[69994]: DEBUG oslo_vmware.rw_handles [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lease: (returnval){ [ 1064.255438] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a3340-57a6-9daf-591e-b5a7aaa7ccd1" [ 1064.255438] env[69994]: _type = "HttpNfcLease" [ 1064.255438] env[69994]: } obtained for exporting VM: (result){ [ 1064.255438] env[69994]: value = "vm-587599" [ 1064.255438] env[69994]: _type = "VirtualMachine" [ 1064.255438] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1064.255728] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the lease: (returnval){ [ 1064.255728] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a3340-57a6-9daf-591e-b5a7aaa7ccd1" [ 1064.255728] env[69994]: _type = "HttpNfcLease" [ 1064.255728] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1064.262434] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1064.262434] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a3340-57a6-9daf-591e-b5a7aaa7ccd1" [ 1064.262434] env[69994]: _type = "HttpNfcLease" [ 1064.262434] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1064.309939] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6f652ba-3357-4eb2-8c8e-fddd5f84e058 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "5b9648a7-f26f-4151-be5c-59991035a529" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 17.929s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.327703] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926090, 'name': ReconfigVM_Task, 'duration_secs': 0.355197} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.327999] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05/9dbaceb8-fa4d-40c4-9f0e-fa9749663a05.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1064.331907] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1ed9cca6-5a33-4a66-9910-55b65efcaec0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.339700] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1064.339700] env[69994]: value = "task-2926093" [ 1064.339700] env[69994]: _type = "Task" [ 1064.339700] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.350337] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926093, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.527237] env[69994]: WARNING nova.network.neutron [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c already exists in list: networks containing: ['dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c']. ignoring it [ 1064.607885] env[69994]: DEBUG nova.scheduler.client.report [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1064.746562] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926091, 'name': CreateSnapshot_Task, 'duration_secs': 0.445913} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.749012] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1064.749012] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54971e6d-9ea5-4452-a207-5ca7005e8e5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.770843] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1064.770843] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a3340-57a6-9daf-591e-b5a7aaa7ccd1" [ 1064.770843] env[69994]: _type = "HttpNfcLease" [ 1064.770843] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1064.771387] env[69994]: DEBUG oslo_vmware.rw_handles [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1064.771387] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a3340-57a6-9daf-591e-b5a7aaa7ccd1" [ 1064.771387] env[69994]: _type = "HttpNfcLease" [ 1064.771387] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1064.772320] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ea473f-e494-4df3-82ac-1d4b664ff29c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.779325] env[69994]: DEBUG oslo_vmware.rw_handles [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528cf1a2-6c12-0c36-d8f1-333682cd86e2/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1064.779514] env[69994]: DEBUG oslo_vmware.rw_handles [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528cf1a2-6c12-0c36-d8f1-333682cd86e2/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1064.854777] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926093, 'name': Rename_Task, 'duration_secs': 0.206974} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.855359] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1064.855640] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f55cb054-f212-4513-b15a-2eefc2b51244 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.863069] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1064.863069] env[69994]: value = "task-2926094" [ 1064.863069] env[69994]: _type = "Task" [ 1064.863069] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.871749] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926094, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.886962] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-34544ed7-f713-4e51-945f-41d721919937 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.109038] env[69994]: DEBUG nova.network.neutron [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updating instance_info_cache with network_info: [{"id": "cf663439-5f58-4ebe-9323-5937dcc425a7", "address": "fa:16:3e:0e:91:27", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf663439-5f", "ovs_interfaceid": "cf663439-5f58-4ebe-9323-5937dcc425a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "980fbc36-1a58-4992-a66c-ec31e2a90b67", "address": "fa:16:3e:a6:cb:86", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", 
"label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap980fbc36-1a", "ovs_interfaceid": "980fbc36-1a58-4992-a66c-ec31e2a90b67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.119958] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.951s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.123812] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.870s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.123812] env[69994]: DEBUG nova.objects.instance [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lazy-loading 'resources' on Instance uuid 4d415c4d-54b2-4324-8e98-9dc476960348 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1065.171692] env[69994]: INFO nova.scheduler.client.report [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Deleted allocations for instance 76dbf172-10b2-4439-9d2a-8226ba46062d [ 1065.279136] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1065.279894] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-19fdeb1d-4adc-4e29-81ec-fd4dd5a44c83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.292019] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1065.292019] 
env[69994]: value = "task-2926095" [ 1065.292019] env[69994]: _type = "Task" [ 1065.292019] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.300214] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926095, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.374763] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926094, 'name': PowerOnVM_Task, 'duration_secs': 0.461725} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.375144] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1065.375513] env[69994]: DEBUG nova.compute.manager [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1065.376509] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65af35b2-58c4-4e23-b624-815834aa025c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.614603] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.615394] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.618134] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1065.618527] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b537fd97-74dd-45e5-bb63-cb8829916e19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.646211] env[69994]: DEBUG nova.virt.hardware [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 
tempest-AttachInterfacesTestJSON-1252867505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1065.646651] env[69994]: DEBUG nova.virt.hardware [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1065.646913] env[69994]: DEBUG nova.virt.hardware [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1065.647222] env[69994]: DEBUG nova.virt.hardware [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1065.647475] env[69994]: DEBUG nova.virt.hardware [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1065.647827] env[69994]: DEBUG nova.virt.hardware [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1065.648766] env[69994]: DEBUG nova.virt.hardware [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1065.648766] env[69994]: DEBUG nova.virt.hardware [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1065.648766] env[69994]: DEBUG nova.virt.hardware [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1065.648950] env[69994]: DEBUG nova.virt.hardware [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 
tempest-AttachInterfacesTestJSON-1252867505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1065.649119] env[69994]: DEBUG nova.virt.hardware [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1065.658881] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Reconfiguring VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1065.658881] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58b1330f-5847-48bb-8659-98046d7698f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.686017] env[69994]: DEBUG oslo_vmware.api [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1065.686017] env[69994]: value = "task-2926096" [ 1065.686017] env[69994]: _type = "Task" [ 1065.686017] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.686017] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c0950f8-3cb1-4edc-a4e0-62fe6ddf65c6 tempest-ListImageFiltersTestJSON-1191915590 tempest-ListImageFiltersTestJSON-1191915590-project-member] Lock "76dbf172-10b2-4439-9d2a-8226ba46062d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.444s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.699281] env[69994]: DEBUG oslo_vmware.api [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926096, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.802355] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926095, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.889927] env[69994]: INFO nova.compute.manager [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] bringing vm to original state: 'stopped' [ 1065.925817] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "85293c91-f363-4085-9eb8-2bf6514fa2f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.928108] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.010501] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958c25be-80a0-4b40-bdd9-34cdfe5173a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.022861] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53df32ba-5c6e-417c-a76e-0197994affd4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.069718] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de445553-bc20-4508-9057-aa000e82db0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.077778] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0baea2-9ede-4b21-b4d7-0a0b1470264c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.086586] env[69994]: DEBUG nova.compute.manager [req-f7e69411-a308-405e-aa0a-122862eb7f96 req-58838577-0cc1-46e2-b100-e092be5056a3 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Received event network-changed-980fbc36-1a58-4992-a66c-ec31e2a90b67 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1066.086586] env[69994]: DEBUG nova.compute.manager [req-f7e69411-a308-405e-aa0a-122862eb7f96 req-58838577-0cc1-46e2-b100-e092be5056a3 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Refreshing instance network info cache due to event network-changed-980fbc36-1a58-4992-a66c-ec31e2a90b67. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1066.086586] env[69994]: DEBUG oslo_concurrency.lockutils [req-f7e69411-a308-405e-aa0a-122862eb7f96 req-58838577-0cc1-46e2-b100-e092be5056a3 service nova] Acquiring lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.086586] env[69994]: DEBUG oslo_concurrency.lockutils [req-f7e69411-a308-405e-aa0a-122862eb7f96 req-58838577-0cc1-46e2-b100-e092be5056a3 service nova] Acquired lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.086586] env[69994]: DEBUG nova.network.neutron [req-f7e69411-a308-405e-aa0a-122862eb7f96 req-58838577-0cc1-46e2-b100-e092be5056a3 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Refreshing network info cache for port 980fbc36-1a58-4992-a66c-ec31e2a90b67 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1066.097137] env[69994]: DEBUG nova.compute.provider_tree [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.197966] env[69994]: DEBUG oslo_vmware.api [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926096, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.307395] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926095, 'name': CloneVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.434511] env[69994]: DEBUG nova.compute.manager [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1066.606026] env[69994]: DEBUG nova.scheduler.client.report [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1066.699119] env[69994]: DEBUG oslo_vmware.api [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926096, 'name': ReconfigVM_Task, 'duration_secs': 0.724411} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.702018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1066.702018] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Reconfigured VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1066.809024] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926095, 'name': CloneVM_Task, 'duration_secs': 1.195941} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.809024] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Created linked-clone VM from snapshot [ 1066.809024] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b493aaa-e12f-43be-847f-dcdc47bc744d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.815251] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Uploading image fddd86c8-01b9-4804-8117-0bf676833773 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1066.896636] env[69994]: DEBUG oslo_vmware.rw_handles [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1066.896636] env[69994]: value = "vm-587601" [ 1066.896636] env[69994]: _type = "VirtualMachine" [ 1066.896636] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1066.897267] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-11d87963-e290-4f0b-bb06-8f9e11cb48b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.900856] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.902190] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.902190] env[69994]: DEBUG nova.compute.manager [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1066.902940] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1a2ad3-9094-4f39-bc4c-2175d3d90ecb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.913299] env[69994]: DEBUG nova.compute.manager [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 
tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1066.918037] env[69994]: DEBUG oslo_vmware.rw_handles [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lease: (returnval){ [ 1066.918037] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b21d0a-d489-7943-2ffc-3cd143537887" [ 1066.918037] env[69994]: _type = "HttpNfcLease" [ 1066.918037] env[69994]: } obtained for exporting VM: (result){ [ 1066.918037] env[69994]: value = "vm-587601" [ 1066.918037] env[69994]: _type = "VirtualMachine" [ 1066.918037] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1066.919135] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the lease: (returnval){ [ 1066.919135] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b21d0a-d489-7943-2ffc-3cd143537887" [ 1066.919135] env[69994]: _type = "HttpNfcLease" [ 1066.919135] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1066.929239] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1066.929239] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b21d0a-d489-7943-2ffc-3cd143537887" [ 1066.929239] env[69994]: _type = "HttpNfcLease" [ 1066.929239] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1066.930679] env[69994]: DEBUG oslo_vmware.rw_handles [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1066.930679] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b21d0a-d489-7943-2ffc-3cd143537887" [ 1066.930679] env[69994]: _type = "HttpNfcLease" [ 1066.930679] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1066.931603] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b33b6a-3bc6-4007-bd5c-704330b5fdf0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.944781] env[69994]: DEBUG oslo_vmware.rw_handles [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526c6960-b7ab-2c78-a38f-7a11db8c07d3/disk-0.vmdk from lease info. 
{{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1066.945201] env[69994]: DEBUG oslo_vmware.rw_handles [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526c6960-b7ab-2c78-a38f-7a11db8c07d3/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1067.008483] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.073899] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e5beea3c-99bc-42c2-b884-b3a78f418e5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.112850] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.994s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.115135] env[69994]: DEBUG oslo_concurrency.lockutils [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.633s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.115345] env[69994]: DEBUG nova.objects.instance [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lazy-loading 'resources' on Instance uuid 5e142f6e-920a-4f11-abff-13eb5c168660 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.136613] env[69994]: INFO nova.scheduler.client.report [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Deleted allocations for instance 4d415c4d-54b2-4324-8e98-9dc476960348 [ 1067.205082] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0f31b87-53c2-4ed6-bbfe-8ba8f2045bce tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-d5af7ae1-d68e-4170-b762-e56d7f2551d7-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.695s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.344873] env[69994]: DEBUG nova.network.neutron [req-f7e69411-a308-405e-aa0a-122862eb7f96 req-58838577-0cc1-46e2-b100-e092be5056a3 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updated VIF entry in instance network info cache for port 980fbc36-1a58-4992-a66c-ec31e2a90b67. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1067.345392] env[69994]: DEBUG nova.network.neutron [req-f7e69411-a308-405e-aa0a-122862eb7f96 req-58838577-0cc1-46e2-b100-e092be5056a3 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updating instance_info_cache with network_info: [{"id": "cf663439-5f58-4ebe-9323-5937dcc425a7", "address": "fa:16:3e:0e:91:27", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf663439-5f", "ovs_interfaceid": "cf663439-5f58-4ebe-9323-5937dcc425a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "980fbc36-1a58-4992-a66c-ec31e2a90b67", "address": "fa:16:3e:a6:cb:86", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap980fbc36-1a", "ovs_interfaceid": "980fbc36-1a58-4992-a66c-ec31e2a90b67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.427320] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1067.427320] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d49afcb-c04b-4ec5-bb7a-3cd82fd97efd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.434872] env[69994]: DEBUG oslo_vmware.api 
[None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1067.434872] env[69994]: value = "task-2926098" [ 1067.434872] env[69994]: _type = "Task" [ 1067.434872] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.443990] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926098, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.498228] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "3c374550-d65b-494a-89d7-60720f6b44dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.498228] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "3c374550-d65b-494a-89d7-60720f6b44dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.645785] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aecced8-ce7b-4720-8494-1951d645a62a tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "4d415c4d-54b2-4324-8e98-9dc476960348" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.026s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.848830] env[69994]: DEBUG oslo_concurrency.lockutils [req-f7e69411-a308-405e-aa0a-122862eb7f96 req-58838577-0cc1-46e2-b100-e092be5056a3 service nova] Releasing lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.864443] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c524d40-4dc2-4c1c-8108-997696389b19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.872656] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ddadeb-01a5-4b4a-8b66-655c2723815b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.908827] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f793f4f-5879-4128-af91-c28054051e31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.917800] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802a3623-e3ac-4340-b3a8-2ecd94efe4b3 {{(pid=69994) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.933368] env[69994]: DEBUG nova.compute.provider_tree [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.944380] env[69994]: DEBUG oslo_vmware.api [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926098, 'name': PowerOffVM_Task, 'duration_secs': 0.238942} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.945545] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1067.945900] env[69994]: DEBUG nova.compute.manager [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1067.947025] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0403eb44-600f-4ab8-978f-620819e5d7ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.000249] env[69994]: DEBUG nova.compute.manager [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1068.438077] env[69994]: DEBUG nova.scheduler.client.report [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1068.461593] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.560s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.523984] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.705982] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.706390] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.907020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "5b9648a7-f26f-4151-be5c-59991035a529" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.907345] env[69994]: DEBUG oslo_concurrency.lockutils [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "5b9648a7-f26f-4151-be5c-59991035a529" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.907539] 
env[69994]: DEBUG oslo_concurrency.lockutils [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "5b9648a7-f26f-4151-be5c-59991035a529-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.907815] env[69994]: DEBUG oslo_concurrency.lockutils [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "5b9648a7-f26f-4151-be5c-59991035a529-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.907995] env[69994]: DEBUG oslo_concurrency.lockutils [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "5b9648a7-f26f-4151-be5c-59991035a529-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.910487] env[69994]: INFO nova.compute.manager [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Terminating instance [ 1068.947022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.832s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.950120] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.465s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.950489] env[69994]: DEBUG nova.objects.instance [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lazy-loading 'resources' on Instance uuid ab320e59-febb-4f8f-9bc4-74227d29c752 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.970188] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.976522] env[69994]: INFO nova.scheduler.client.report [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted allocations for instance 5e142f6e-920a-4f11-abff-13eb5c168660 [ 
1069.210230] env[69994]: DEBUG nova.compute.utils [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1069.417837] env[69994]: DEBUG nova.compute.manager [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1069.417837] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1069.419411] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb78cf2c-362d-4a8a-8c9e-d388519f6f87 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.428920] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1069.428920] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec9b89e4-7f9c-4af5-9f07-91c716d584e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.437596] env[69994]: DEBUG oslo_vmware.api [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1069.437596] env[69994]: value = "task-2926099" [ 1069.437596] env[69994]: _type = "Task" [ 1069.437596] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.485669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-63e1d4f8-ab53-4509-8dd2-3303b9d1ffd6 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "5e142f6e-920a-4f11-abff-13eb5c168660" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.480s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.661044] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0df00ba-9fdd-448a-9b56-04292ceed6e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.670138] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9072b06-649e-43c5-b867-28fa63a20b6d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.712880] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b04985-5ca3-4c8f-b3a1-7c4644e1184b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.716774] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.722848] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b73ae3-25d5-4082-9c0e-72a3278804cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.738068] env[69994]: DEBUG nova.compute.provider_tree [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.948808] env[69994]: DEBUG oslo_vmware.api [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926099, 'name': PowerOffVM_Task, 'duration_secs': 0.294823} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.948808] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1069.949296] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1069.949296] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38d6b015-6233-426e-ab1e-68ee349c115d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.012979] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1070.013215] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1070.013467] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleting the datastore file [datastore1] 5b9648a7-f26f-4151-be5c-59991035a529 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1070.013656] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e37994a-17b8-42b8-b164-c023387fa999 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.020859] env[69994]: DEBUG oslo_vmware.api [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1070.020859] env[69994]: value = "task-2926101" [ 1070.020859] env[69994]: _type = "Task" [ 1070.020859] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.029412] env[69994]: DEBUG oslo_vmware.api [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926101, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.241928] env[69994]: DEBUG nova.scheduler.client.report [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1070.531571] env[69994]: DEBUG oslo_vmware.api [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926101, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150187} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.532016] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1070.532437] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1070.532863] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1070.532985] env[69994]: INFO nova.compute.manager [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1070.533443] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1070.533736] env[69994]: DEBUG nova.compute.manager [-] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1070.533872] env[69994]: DEBUG nova.network.neutron [-] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1070.750526] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.800s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.754037] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.745s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.755440] env[69994]: INFO nova.compute.claims [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1070.779636] env[69994]: INFO nova.scheduler.client.report [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Deleted allocations for instance ab320e59-febb-4f8f-9bc4-74227d29c752 [ 1070.790269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.790591] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.794023] env[69994]: INFO nova.compute.manager [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Attaching volume dc4ef527-168d-4d24-a145-554ce6a61cad to /dev/sdb [ 1070.843022] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3cf72e-7fd9-40cf-9cba-1bacc39ceea2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.850881] env[69994]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639823ce-b72f-44a4-96a5-c2b94d215662 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.866526] env[69994]: DEBUG nova.virt.block_device [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Updating existing volume attachment record: 0f269892-ecdf-412c-88df-276a2d9e9baa {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1071.291033] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d87a516-8f81-4dd7-a3bb-534d78efc4da tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "ab320e59-febb-4f8f-9bc4-74227d29c752" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.908s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.358556] env[69994]: DEBUG nova.network.neutron [-] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.861080] env[69994]: INFO nova.compute.manager [-] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Took 1.33 seconds to deallocate network for instance. [ 1071.946455] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1188383e-4e43-489e-8766-3ef7c4869907 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.954347] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3dfbac-8d90-4566-b17d-cbc52419a953 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.985430] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f247fdf3-3e7e-43c6-97f0-5f8c8477e3ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.993691] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffdfbead-3740-4543-bf0f-f8d2c6dbcf2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.007632] env[69994]: DEBUG nova.compute.provider_tree [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.369156] env[69994]: DEBUG oslo_concurrency.lockutils [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.373909] env[69994]: DEBUG oslo_vmware.rw_handles [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 
tempest-DeleteServersTestJSON-409940019-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528cf1a2-6c12-0c36-d8f1-333682cd86e2/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1072.374905] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ab6da0-ea6b-4df2-aeea-0fa94ef7072d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.381870] env[69994]: DEBUG oslo_vmware.rw_handles [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528cf1a2-6c12-0c36-d8f1-333682cd86e2/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1072.382071] env[69994]: ERROR oslo_vmware.rw_handles [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528cf1a2-6c12-0c36-d8f1-333682cd86e2/disk-0.vmdk due to incomplete transfer. [ 1072.382321] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-08abeafb-6646-4da2-95c3-3178575fd31e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.391440] env[69994]: DEBUG oslo_vmware.rw_handles [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528cf1a2-6c12-0c36-d8f1-333682cd86e2/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1072.391440] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Uploaded image 9941d076-1089-4dc8-ad0f-666d4744ab9e to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1072.393286] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1072.393648] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-07eb2f08-4999-46f9-9636-37e0f1c53f6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.400417] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1072.400417] env[69994]: value = "task-2926105" [ 1072.400417] env[69994]: _type = "Task" [ 1072.400417] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.409113] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926105, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.511540] env[69994]: DEBUG nova.scheduler.client.report [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1072.911300] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926105, 'name': Destroy_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.018045] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.264s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.018178] env[69994]: DEBUG nova.compute.manager [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1073.021332] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.497s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.022679] env[69994]: INFO nova.compute.claims [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1073.101649] env[69994]: DEBUG nova.compute.manager [req-1649c1df-d7f7-41cd-b644-6f76c4b410b4 req-a030b2c4-839f-453f-8199-9c1b6f7ea9e4 service nova] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Received event network-vif-deleted-ad28c14f-638f-4073-b494-cb6a2a579dab {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1073.299797] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "c7c17fab-71a4-44df-907e-f7b408f80236" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.300535] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "c7c17fab-71a4-44df-907e-f7b408f80236" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.341320] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "interface-d5af7ae1-d68e-4170-b762-e56d7f2551d7-dda9491b-4aa9-4a86-9da0-a1f044e25c0e" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.341713] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-d5af7ae1-d68e-4170-b762-e56d7f2551d7-dda9491b-4aa9-4a86-9da0-a1f044e25c0e" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.341956] env[69994]: DEBUG nova.objects.instance [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'flavor' on Instance uuid d5af7ae1-d68e-4170-b762-e56d7f2551d7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.412275] env[69994]: DEBUG 
oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926105, 'name': Destroy_Task, 'duration_secs': 0.605992} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.412717] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Destroyed the VM [ 1073.412847] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1073.413247] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-930ed47e-1156-43bd-893c-686b836e94b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.420519] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1073.420519] env[69994]: value = "task-2926107" [ 1073.420519] env[69994]: _type = "Task" [ 1073.420519] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.429938] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926107, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.526810] env[69994]: DEBUG nova.compute.utils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1073.530519] env[69994]: DEBUG nova.compute.manager [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1073.530665] env[69994]: DEBUG nova.network.neutron [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1073.603079] env[69994]: DEBUG nova.policy [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '75c4f3c8013e4487aeb79068c4b7a3d7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0af2d3f09d264d4c9bba8747f74383bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1073.763857] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "a7dd4e89-a953-49b4-b56f-fdacef3a621b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.764694] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "a7dd4e89-a953-49b4-b56f-fdacef3a621b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.802894] env[69994]: DEBUG nova.compute.manager [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1073.932604] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926107, 'name': RemoveSnapshot_Task, 'duration_secs': 0.366624} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.932949] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1073.933301] env[69994]: DEBUG nova.compute.manager [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1073.934749] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3643377d-8bdd-4314-8bbf-1ad314f73205 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.980258] env[69994]: DEBUG nova.objects.instance [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'pci_requests' on Instance uuid d5af7ae1-d68e-4170-b762-e56d7f2551d7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1074.031643] env[69994]: DEBUG nova.compute.manager [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1074.044878] env[69994]: DEBUG nova.network.neutron [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Successfully created port: dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1074.093390] env[69994]: DEBUG oslo_vmware.rw_handles [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526c6960-b7ab-2c78-a38f-7a11db8c07d3/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1074.094584] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7a5d3d-60ab-491c-8eac-010b29ad4fe2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.102762] env[69994]: DEBUG oslo_vmware.rw_handles [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526c6960-b7ab-2c78-a38f-7a11db8c07d3/disk-0.vmdk is in state: ready. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1074.102975] env[69994]: ERROR oslo_vmware.rw_handles [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526c6960-b7ab-2c78-a38f-7a11db8c07d3/disk-0.vmdk due to incomplete transfer. [ 1074.104254] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d4fec2cc-125e-41f8-ba48-ba54b9f7f9e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.112974] env[69994]: DEBUG oslo_vmware.rw_handles [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526c6960-b7ab-2c78-a38f-7a11db8c07d3/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1074.112974] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Uploaded image fddd86c8-01b9-4804-8117-0bf676833773 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1074.114226] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1074.114467] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c2ee7173-3873-4ca4-944c-275d943b92c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.121691] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1074.121691] env[69994]: value = "task-2926108" [ 1074.121691] env[69994]: _type = "Task" [ 1074.121691] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.130323] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926108, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.239871] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11633493-209b-4779-8492-9b0c8f239553 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.249527] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820f5886-a770-44d0-8e44-c94e28d2e3a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.298154] env[69994]: DEBUG nova.compute.manager [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1074.302113] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.302380] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.302582] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.302758] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.302929] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.305290] env[69994]: INFO nova.compute.manager [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 
9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Terminating instance [ 1074.308418] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49482a11-3eb0-4db7-bbd6-47859afee7f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.320370] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91b7e74-aa13-4ea4-8728-b0a13e1038c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.336075] env[69994]: DEBUG nova.compute.provider_tree [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.338178] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.453034] env[69994]: INFO nova.compute.manager [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Shelve offloading [ 1074.483656] env[69994]: DEBUG nova.objects.base [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1074.483913] env[69994]: DEBUG nova.network.neutron [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1074.556355] env[69994]: DEBUG nova.policy [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9c7ff94bd744305a13df72dbf967c11', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66d57a69e0924b9abc2cc4e67fc8173c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1074.633616] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926108, 'name': Destroy_Task, 'duration_secs': 0.344711} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.633616] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Destroyed the VM [ 1074.633828] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1074.634112] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7634b8d4-cfa6-42db-93c5-2ac9f1a26655 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.641543] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1074.641543] env[69994]: value = "task-2926109" [ 1074.641543] env[69994]: _type = "Task" [ 1074.641543] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.650486] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926109, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.814090] env[69994]: DEBUG nova.compute.manager [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1074.817019] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1074.817019] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33089773-7e91-45bf-8330-91257e7e41d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.826451] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1074.826451] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5d221001-5e92-4668-a85a-bed9786767d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.835396] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.843919] env[69994]: DEBUG nova.scheduler.client.report [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1074.887198] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1074.887198] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1074.887198] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleting the datastore file [datastore2] 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05 {{(pid=69994) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1074.887198] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a249fc8-5d28-4fde-94c1-ba1b61669ba9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.893862] env[69994]: DEBUG oslo_vmware.api [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1074.893862] env[69994]: value = "task-2926111" [ 1074.893862] env[69994]: _type = "Task" [ 1074.893862] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.905111] env[69994]: DEBUG oslo_vmware.api [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926111, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.925286] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Acquiring lock "489b68f2-c2f2-4710-a06f-45ad8c577441" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.925286] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Lock "489b68f2-c2f2-4710-a06f-45ad8c577441" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.960691] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1074.960986] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9cb0e33a-a594-46d7-9493-4faf75ed7b85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.968096] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1074.968096] env[69994]: value = "task-2926112" [ 1074.968096] env[69994]: _type = "Task" [ 1074.968096] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.982194] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1074.982473] env[69994]: DEBUG nova.compute.manager [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1074.983321] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2d3691-ceba-41f2-b992-e1959776015d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.990237] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "refresh_cache-63e1c67b-6a79-4c09-a835-4ff11e15e981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.990472] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "refresh_cache-63e1c67b-6a79-4c09-a835-4ff11e15e981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.990689] env[69994]: DEBUG nova.network.neutron [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1075.045896] env[69994]: DEBUG nova.compute.manager [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1075.071306] env[69994]: DEBUG nova.virt.hardware [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1075.071620] env[69994]: DEBUG nova.virt.hardware [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1075.071829] env[69994]: DEBUG nova.virt.hardware [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1075.072262] env[69994]: DEBUG nova.virt.hardware [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1075.072464] env[69994]: DEBUG nova.virt.hardware [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1075.072619] env[69994]: DEBUG nova.virt.hardware [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1075.072858] env[69994]: DEBUG nova.virt.hardware [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1075.073013] env[69994]: DEBUG nova.virt.hardware [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1075.073183] env[69994]: DEBUG 
nova.virt.hardware [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1075.073354] env[69994]: DEBUG nova.virt.hardware [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1075.073533] env[69994]: DEBUG nova.virt.hardware [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1075.074406] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e6a79e-abde-4370-8d40-da1a2e32a246 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.088659] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cfd29d6-328e-46b9-a43d-9b7700790151 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.151631] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926109, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.349059] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.350114] env[69994]: DEBUG nova.compute.manager [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1075.352675] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 6.383s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.352874] env[69994]: DEBUG nova.objects.instance [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1075.406957] env[69994]: DEBUG oslo_vmware.api [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926111, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145761} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.406957] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1075.408235] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1075.408527] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1075.408730] env[69994]: INFO nova.compute.manager [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1075.409440] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1075.409667] env[69994]: DEBUG nova.compute.manager [-] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1075.409767] env[69994]: DEBUG nova.network.neutron [-] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1075.419112] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Volume attach. Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1075.419376] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587603', 'volume_id': 'dc4ef527-168d-4d24-a145-554ce6a61cad', 'name': 'volume-dc4ef527-168d-4d24-a145-554ce6a61cad', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '68eba44a-0989-47dc-a88b-102d9aa34c5d', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc4ef527-168d-4d24-a145-554ce6a61cad', 'serial': 'dc4ef527-168d-4d24-a145-554ce6a61cad'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1075.420275] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102abe0f-2a61-4e0f-a441-1b48eddc73a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.426454] env[69994]: DEBUG nova.compute.manager [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1075.442357] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086d4b2a-2cd5-45e0-a0a1-545f94106e15 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.467577] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] volume-dc4ef527-168d-4d24-a145-554ce6a61cad/volume-dc4ef527-168d-4d24-a145-554ce6a61cad.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1075.467968] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2c0fcbb-d218-4bfd-b3a9-99d00a32ad19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.489125] env[69994]: DEBUG oslo_vmware.api [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1075.489125] env[69994]: value = "task-2926113" [ 1075.489125] env[69994]: _type = "Task" [ 1075.489125] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.501112] env[69994]: DEBUG oslo_vmware.api [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926113, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.607328] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.607984] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.653864] env[69994]: DEBUG oslo_vmware.api [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926109, 'name': RemoveSnapshot_Task, 'duration_secs': 0.523672} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.656826] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1075.656968] env[69994]: INFO nova.compute.manager [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Took 12.00 seconds to snapshot the instance on the hypervisor. [ 1075.858727] env[69994]: DEBUG nova.compute.utils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1075.874033] env[69994]: DEBUG nova.compute.manager [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1075.874033] env[69994]: DEBUG nova.network.neutron [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1075.950832] env[69994]: DEBUG nova.policy [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ce185b7affb46fd898b46f6db1224f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a10b55bcc104c108604d402ec6d09ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1075.955191] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.975408] env[69994]: DEBUG nova.network.neutron [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Updating instance_info_cache with network_info: [{"id": "2850c5e6-0790-4289-aab5-45fca743e84f", "address": "fa:16:3e:14:e2:4a", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2850c5e6-07", "ovs_interfaceid": "2850c5e6-0790-4289-aab5-45fca743e84f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.002321] env[69994]: DEBUG oslo_vmware.api [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926113, 'name': ReconfigVM_Task, 'duration_secs': 0.391446} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.002533] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Reconfigured VM instance instance-00000052 to attach disk [datastore2] volume-dc4ef527-168d-4d24-a145-554ce6a61cad/volume-dc4ef527-168d-4d24-a145-554ce6a61cad.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1076.011461] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94e4313a-4598-4c04-894f-722e64c43aa2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.025821] env[69994]: DEBUG nova.network.neutron [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Successfully updated port: dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1076.034448] env[69994]: DEBUG oslo_vmware.api [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1076.034448] env[69994]: value = "task-2926114" [ 1076.034448] env[69994]: _type = "Task" [ 1076.034448] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.040335] env[69994]: DEBUG nova.compute.manager [req-aa5db27d-0699-44a7-865d-8785ad239863 req-eeb33070-5697-4426-9880-d9f987aee7d5 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Received event network-vif-plugged-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1076.041103] env[69994]: DEBUG oslo_concurrency.lockutils [req-aa5db27d-0699-44a7-865d-8785ad239863 req-eeb33070-5697-4426-9880-d9f987aee7d5 service nova] Acquiring lock "85293c91-f363-4085-9eb8-2bf6514fa2f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.041103] env[69994]: DEBUG oslo_concurrency.lockutils [req-aa5db27d-0699-44a7-865d-8785ad239863 req-eeb33070-5697-4426-9880-d9f987aee7d5 service nova] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.041103] env[69994]: DEBUG oslo_concurrency.lockutils [req-aa5db27d-0699-44a7-865d-8785ad239863 req-eeb33070-5697-4426-9880-d9f987aee7d5 service nova] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.041305] env[69994]: DEBUG nova.compute.manager [req-aa5db27d-0699-44a7-865d-8785ad239863 req-eeb33070-5697-4426-9880-d9f987aee7d5 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] No waiting events found dispatching network-vif-plugged-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1076.041305] env[69994]: WARNING nova.compute.manager [req-aa5db27d-0699-44a7-865d-8785ad239863 req-eeb33070-5697-4426-9880-d9f987aee7d5 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Received unexpected event network-vif-plugged-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b for instance with vm_state building and task_state spawning. [ 1076.050571] env[69994]: DEBUG oslo_vmware.api [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926114, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.127218] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.127933] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.128185] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.128413] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.128491] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.128631] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.128789] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1076.128946] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.151028] env[69994]: DEBUG nova.compute.manager [req-c7517345-def1-4371-be36-da7ef6799a96 req-4d7ec740-835a-4633-9978-5ffdcbf89bc7 service nova] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Received event network-vif-deleted-ab89b870-7c43-45dd-878e-c1f922fc3ee4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1076.151028] env[69994]: INFO nova.compute.manager [req-c7517345-def1-4371-be36-da7ef6799a96 req-4d7ec740-835a-4633-9978-5ffdcbf89bc7 service nova] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Neutron deleted interface ab89b870-7c43-45dd-878e-c1f922fc3ee4; detaching it from the instance and deleting it from the info cache [ 1076.151028] env[69994]: DEBUG nova.network.neutron [req-c7517345-def1-4371-be36-da7ef6799a96 req-4d7ec740-835a-4633-9978-5ffdcbf89bc7 service nova] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.233144] env[69994]: DEBUG nova.compute.manager [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Found 3 images (rotation: 2) {{(pid=69994) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1076.233364] env[69994]: DEBUG nova.compute.manager [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Rotating out 1 backups {{(pid=69994) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1076.233534] env[69994]: DEBUG nova.compute.manager [None req-759e794a-445e-4837-afc9-9c6649ba7d44 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Deleting image c18ad5ff-2d6b-46c7-9cf7-a9e4b3b5d13f {{(pid=69994) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1076.339482] env[69994]: DEBUG nova.network.neutron [-] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.373107] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64ef6d54-336c-4b47-8e87-d528a349ef7e tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.374331] env[69994]: DEBUG nova.compute.manager [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1076.377163] env[69994]: DEBUG oslo_concurrency.lockutils [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.008s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.377373] env[69994]: DEBUG oslo_concurrency.lockutils [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.383576] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.045s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.385499] env[69994]: INFO nova.compute.claims [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1076.417174] env[69994]: INFO nova.scheduler.client.report [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted allocations for instance 5b9648a7-f26f-4151-be5c-59991035a529 [ 1076.420926] env[69994]: WARNING oslo_messaging._drivers.amqpdriver [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 1076.479404] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "refresh_cache-63e1c67b-6a79-4c09-a835-4ff11e15e981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.531574] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.531574] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquired lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.531574] env[69994]: DEBUG nova.network.neutron [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1076.554159] env[69994]: DEBUG oslo_vmware.api [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926114, 'name': ReconfigVM_Task, 'duration_secs': 0.151967} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.558543] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587603', 'volume_id': 'dc4ef527-168d-4d24-a145-554ce6a61cad', 'name': 'volume-dc4ef527-168d-4d24-a145-554ce6a61cad', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '68eba44a-0989-47dc-a88b-102d9aa34c5d', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc4ef527-168d-4d24-a145-554ce6a61cad', 'serial': 'dc4ef527-168d-4d24-a145-554ce6a61cad'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1076.562823] env[69994]: DEBUG nova.network.neutron [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Successfully updated port: dda9491b-4aa9-4a86-9da0-a1f044e25c0e {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1076.628427] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "6c81eb8b-78d7-469d-8076-13d8a8f61fec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.628796] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "6c81eb8b-78d7-469d-8076-13d8a8f61fec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.629692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "6c81eb8b-78d7-469d-8076-13d8a8f61fec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.629944] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "6c81eb8b-78d7-469d-8076-13d8a8f61fec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.630126] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "6c81eb8b-78d7-469d-8076-13d8a8f61fec-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.632424] env[69994]: INFO nova.compute.manager [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Terminating instance [ 1076.634496] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.648620] env[69994]: DEBUG nova.network.neutron [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Successfully created port: da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1076.656203] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-661ed37c-81b7-48a6-bd36-9a3d5339c872 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.673964] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c107176b-6415-4f11-926b-d9db59178ece {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.712803] env[69994]: DEBUG nova.compute.manager [req-c7517345-def1-4371-be36-da7ef6799a96 req-4d7ec740-835a-4633-9978-5ffdcbf89bc7 service nova] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Detach interface failed, port_id=ab89b870-7c43-45dd-878e-c1f922fc3ee4, reason: Instance 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1076.845278] env[69994]: INFO nova.compute.manager [-] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Took 1.43 seconds to deallocate network for instance. 
[ 1076.862113] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1076.863051] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4bd8263-1c59-4db0-b3ef-4bb60beed08e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.871518] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1076.871798] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09fe41fb-d2d3-4de5-8ab2-451a84bdbe67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.929215] env[69994]: DEBUG oslo_concurrency.lockutils [None req-100f5444-57ef-4648-9ce0-67c6a8b75647 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "5b9648a7-f26f-4151-be5c-59991035a529" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.022s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.951992] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1076.952392] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1076.952704] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleting the datastore file [datastore2] 63e1c67b-6a79-4c09-a835-4ff11e15e981 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1076.953497] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb8c61c0-7033-4720-9b99-cb2bed4d2924 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.962777] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1076.962777] env[69994]: value = "task-2926116" [ 1076.962777] env[69994]: _type = "Task" [ 1076.962777] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.977961] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926116, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.066738] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.066944] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1077.067160] env[69994]: DEBUG nova.network.neutron [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1077.091233] env[69994]: DEBUG nova.network.neutron [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1077.138835] env[69994]: DEBUG nova.compute.manager [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1077.139103] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1077.145080] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0a7002-2676-4011-9c66-d002ec6e7416 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.159248] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1077.159248] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4678d91-384c-4e24-a074-a877d5016b5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.168245] env[69994]: DEBUG oslo_vmware.api [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1077.168245] env[69994]: value = "task-2926117" [ 1077.168245] env[69994]: _type = "Task" [ 1077.168245] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.178615] env[69994]: DEBUG oslo_vmware.api [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926117, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.231420] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "dd196e59-868b-409f-bddb-bb99b0c1092f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.231420] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.294307] env[69994]: DEBUG nova.network.neutron [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Updating instance_info_cache with network_info: [{"id": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "address": "fa:16:3e:96:f0:2c", "network": {"id": "ca055ef0-8a45-4457-a25c-226ccd592aa9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472062423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0af2d3f09d264d4c9bba8747f74383bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcfa2bab-5c", "ovs_interfaceid": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.351254] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.409748] env[69994]: DEBUG nova.compute.manager [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1077.413338] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "922799c0-707c-4f4e-a54c-f015eab0a8d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.413843] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "922799c0-707c-4f4e-a54c-f015eab0a8d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.414079] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "922799c0-707c-4f4e-a54c-f015eab0a8d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.414423] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "922799c0-707c-4f4e-a54c-f015eab0a8d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.414728] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "922799c0-707c-4f4e-a54c-f015eab0a8d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.418269] env[69994]: INFO nova.compute.manager [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Terminating instance [ 1077.443066] env[69994]: DEBUG nova.virt.hardware [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1077.443401] env[69994]: DEBUG nova.virt.hardware [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1077.443602] env[69994]: DEBUG nova.virt.hardware [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1077.443824] env[69994]: DEBUG nova.virt.hardware [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1077.444014] env[69994]: DEBUG nova.virt.hardware [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1077.444224] env[69994]: DEBUG nova.virt.hardware [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1077.444479] env[69994]: DEBUG nova.virt.hardware [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1077.444675] env[69994]: DEBUG nova.virt.hardware [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1077.444894] env[69994]: DEBUG nova.virt.hardware [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1077.445122] env[69994]: DEBUG nova.virt.hardware [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1077.445351] env[69994]: DEBUG nova.virt.hardware [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1077.446575] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae12b2f1-2dff-4cf1-a3f1-5c2deb659d9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.459036] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ae0550-3625-4802-8704-6ebc738018f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.475642] env[69994]: DEBUG oslo_vmware.api [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926116, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142817} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.484959] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1077.485222] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1077.485222] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1077.521733] env[69994]: INFO nova.scheduler.client.report [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleted allocations for instance 63e1c67b-6a79-4c09-a835-4ff11e15e981 [ 1077.621091] env[69994]: WARNING nova.network.neutron [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c already exists in list: networks containing: ['dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c']. ignoring it [ 1077.621741] env[69994]: WARNING nova.network.neutron [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c already exists in list: networks containing: ['dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c']. 
ignoring it [ 1077.628955] env[69994]: DEBUG nova.objects.instance [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'flavor' on Instance uuid 68eba44a-0989-47dc-a88b-102d9aa34c5d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.681166] env[69994]: DEBUG oslo_vmware.api [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926117, 'name': PowerOffVM_Task, 'duration_secs': 0.261058} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.681529] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1077.681702] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1077.681968] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9175a1f4-eff3-48ca-bae3-e192225c1705 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.688589] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916e8110-29fa-4f6c-a71b-de96fe406429 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.697173] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729ec0fe-549a-4aed-b123-fe9bca7fc0dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.734435] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab3d88f-5476-4fbd-bc26-9a12aa839189 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.737131] env[69994]: DEBUG nova.compute.manager [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1077.747292] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f324274-c8df-4482-87cc-db9ad823c299 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.761062] env[69994]: DEBUG nova.compute.provider_tree [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1077.763709] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1077.763924] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1077.764123] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Deleting the datastore file [datastore1] 6c81eb8b-78d7-469d-8076-13d8a8f61fec {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1077.764730] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16f32904-385a-4c13-a9f7-809d29382577 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.774149] env[69994]: DEBUG oslo_vmware.api [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1077.774149] env[69994]: value = "task-2926119" [ 1077.774149] env[69994]: _type = "Task" [ 1077.774149] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.783673] env[69994]: DEBUG oslo_vmware.api [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926119, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.796132] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Releasing lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.796497] env[69994]: DEBUG nova.compute.manager [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Instance network_info: |[{"id": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "address": "fa:16:3e:96:f0:2c", "network": {"id": "ca055ef0-8a45-4457-a25c-226ccd592aa9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472062423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0af2d3f09d264d4c9bba8747f74383bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcfa2bab-5c", "ovs_interfaceid": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1077.796910] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:f0:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1077.804772] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Creating folder: Project (0af2d3f09d264d4c9bba8747f74383bc). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1077.805374] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ecc14f44-8ee3-4c05-b1e4-95340a092965 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.817557] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Created folder: Project (0af2d3f09d264d4c9bba8747f74383bc) in parent group-v587342. [ 1077.818250] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Creating folder: Instances. Parent ref: group-v587604. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1077.818250] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad3cd91b-7e4b-444e-bb06-5b69e754157a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.829877] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Created folder: Instances in parent group-v587604. [ 1077.830248] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1077.832799] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1077.832954] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c22750d9-b42e-48ae-ac40-191419a19a3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.855741] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1077.855741] env[69994]: value = "task-2926122" [ 1077.855741] env[69994]: _type = "Task" [ 1077.855741] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.865395] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926122, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.922971] env[69994]: DEBUG nova.compute.manager [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1077.923229] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1077.924209] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a3086e-3a7c-43d9-973f-3160f3e881ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.935233] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1077.935553] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5809e2ec-01e6-4e7a-8ab6-18c574c6a20a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.945491] env[69994]: DEBUG oslo_vmware.api [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1077.945491] env[69994]: value = "task-2926123" [ 1077.945491] env[69994]: _type = "Task" [ 1077.945491] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.955025] env[69994]: DEBUG oslo_vmware.api [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926123, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.027094] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.135213] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a03822-624b-4c95-8c8f-c48eec249d6f tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.344s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.264286] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.269042] env[69994]: DEBUG nova.scheduler.client.report [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1078.285273] env[69994]: DEBUG oslo_vmware.api [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926119, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174884} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.286131] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1078.286430] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1078.286615] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1078.286789] env[69994]: INFO nova.compute.manager [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1078.287052] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1078.287495] env[69994]: DEBUG nova.compute.manager [-] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1078.287596] env[69994]: DEBUG nova.network.neutron [-] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1078.370479] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926122, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.459893] env[69994]: DEBUG oslo_vmware.api [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926123, 'name': PowerOffVM_Task, 'duration_secs': 0.178751} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.467242] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1078.467242] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1078.467605] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c32bbf7b-92f6-46e2-9bc3-9b4604998b78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.487523] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0f630158-9960-4aa4-9211-cb4954123337 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.487816] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0f630158-9960-4aa4-9211-cb4954123337 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.489331] env[69994]: DEBUG nova.compute.manager [None req-0f630158-9960-4aa4-9211-cb4954123337 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1078.489331] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf17190-73b3-4b1a-8bf3-e8b301229bb1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.499466] env[69994]: DEBUG nova.compute.manager [None req-0f630158-9960-4aa4-9211-cb4954123337 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1078.500112] env[69994]: DEBUG nova.objects.instance [None req-0f630158-9960-4aa4-9211-cb4954123337 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'flavor' on Instance uuid 68eba44a-0989-47dc-a88b-102d9aa34c5d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.540151] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d4a31e1-5048-4906-952d-54fff03d32ff 
tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1078.540479] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1078.540659] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Deleting the datastore file [datastore1] 922799c0-707c-4f4e-a54c-f015eab0a8d7 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1078.540927] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5cf057a-dff8-4308-994b-1858e2907680 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.549832] env[69994]: DEBUG oslo_vmware.api [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for the task: (returnval){ [ 1078.549832] env[69994]: value = "task-2926125" [ 1078.549832] env[69994]: _type = "Task" [ 1078.549832] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.559358] env[69994]: DEBUG oslo_vmware.api [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926125, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.618591] env[69994]: DEBUG nova.network.neutron [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updating instance_info_cache with network_info: [{"id": "cf663439-5f58-4ebe-9323-5937dcc425a7", "address": "fa:16:3e:0e:91:27", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf663439-5f", "ovs_interfaceid": "cf663439-5f58-4ebe-9323-5937dcc425a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "980fbc36-1a58-4992-a66c-ec31e2a90b67", "address": "fa:16:3e:a6:cb:86", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap980fbc36-1a", "ovs_interfaceid": "980fbc36-1a58-4992-a66c-ec31e2a90b67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dda9491b-4aa9-4a86-9da0-a1f044e25c0e", "address": "fa:16:3e:b8:bf:02", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdda9491b-4a", "ovs_interfaceid": "dda9491b-4aa9-4a86-9da0-a1f044e25c0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.659267] env[69994]: DEBUG nova.network.neutron [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Successfully updated port: da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1078.771174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.387s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.771894] env[69994]: DEBUG nova.compute.manager [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1078.774805] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.939s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.776349] env[69994]: INFO nova.compute.claims [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1078.865976] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926122, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.967227] env[69994]: DEBUG nova.compute.manager [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Received event network-vif-plugged-dda9491b-4aa9-4a86-9da0-a1f044e25c0e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1078.967340] env[69994]: DEBUG oslo_concurrency.lockutils [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] Acquiring lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.967529] env[69994]: DEBUG oslo_concurrency.lockutils [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] Lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.967695] env[69994]: DEBUG oslo_concurrency.lockutils [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] Lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.967863] env[69994]: DEBUG nova.compute.manager [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] No waiting events found dispatching network-vif-plugged-dda9491b-4aa9-4a86-9da0-a1f044e25c0e {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1078.968231] env[69994]: WARNING nova.compute.manager [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Received unexpected event network-vif-plugged-dda9491b-4aa9-4a86-9da0-a1f044e25c0e for instance with vm_state active and task_state None. [ 1078.968231] env[69994]: DEBUG nova.compute.manager [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Received event network-changed-dda9491b-4aa9-4a86-9da0-a1f044e25c0e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1078.968354] env[69994]: DEBUG nova.compute.manager [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Refreshing instance network info cache due to event network-changed-dda9491b-4aa9-4a86-9da0-a1f044e25c0e. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1078.968508] env[69994]: DEBUG oslo_concurrency.lockutils [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] Acquiring lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.031525] env[69994]: DEBUG nova.compute.manager [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Received event network-changed-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1079.031787] env[69994]: DEBUG nova.compute.manager [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Refreshing instance network info cache due to event network-changed-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1079.031965] env[69994]: DEBUG oslo_concurrency.lockutils [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] Acquiring lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.032124] env[69994]: DEBUG oslo_concurrency.lockutils [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] Acquired lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.032360] env[69994]: DEBUG nova.network.neutron [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Refreshing network info cache for port dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1079.060556] env[69994]: DEBUG oslo_vmware.api [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Task: {'id': task-2926125, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.311426} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.060811] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1079.060999] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1079.061199] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1079.061393] env[69994]: INFO nova.compute.manager [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1079.061627] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1079.061854] env[69994]: DEBUG nova.compute.manager [-] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1079.061910] env[69994]: DEBUG nova.network.neutron [-] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1079.124921] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.125622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.125785] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.126072] env[69994]: DEBUG oslo_concurrency.lockutils [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] Acquired lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.126262] env[69994]: DEBUG nova.network.neutron [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Refreshing network info cache for port dda9491b-4aa9-4a86-9da0-a1f044e25c0e {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1079.128099] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8998edea-6ae8-464e-ae4d-7af867190202 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.149561] env[69994]: DEBUG nova.virt.hardware [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1079.149751] env[69994]: DEBUG 
nova.virt.hardware [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1079.149931] env[69994]: DEBUG nova.virt.hardware [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1079.150066] env[69994]: DEBUG nova.virt.hardware [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1079.150241] env[69994]: DEBUG nova.virt.hardware [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1079.150435] env[69994]: DEBUG nova.virt.hardware [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1079.150633] env[69994]: DEBUG nova.virt.hardware [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1079.150789] env[69994]: DEBUG nova.virt.hardware [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1079.150950] env[69994]: DEBUG nova.virt.hardware [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1079.151125] env[69994]: DEBUG nova.virt.hardware [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1079.151299] env[69994]: DEBUG nova.virt.hardware [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1079.157666] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 
tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Reconfiguring VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1079.158307] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83fc2462-2a1c-47ef-a3a0-b2bcb3658a0d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.170841] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.170975] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.171135] env[69994]: DEBUG nova.network.neutron [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1079.181166] env[69994]: DEBUG oslo_vmware.api [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1079.181166] env[69994]: value = "task-2926126" [ 1079.181166] env[69994]: _type = "Task" [ 1079.181166] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.190808] env[69994]: DEBUG oslo_vmware.api [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926126, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.281427] env[69994]: DEBUG nova.compute.utils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1079.284959] env[69994]: DEBUG nova.compute.manager [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1079.284959] env[69994]: DEBUG nova.network.neutron [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1079.324869] env[69994]: DEBUG nova.policy [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d6a1603506e4d48a9d2f8bf61475821', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f602778aac0d41c49e73c2450f31d711', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1079.343923] env[69994]: DEBUG nova.network.neutron [-] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.368180] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926122, 'name': CreateVM_Task, 'duration_secs': 1.380259} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.368381] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1079.369072] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.369243] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.369562] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1079.370087] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbc8907f-17ac-4db8-af2c-8f0583cd93bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.375506] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 
tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1079.375506] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ddaec2-fc8c-28b0-5e71-6ff55c2a3af3" [ 1079.375506] env[69994]: _type = "Task" [ 1079.375506] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.384482] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ddaec2-fc8c-28b0-5e71-6ff55c2a3af3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.508384] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f630158-9960-4aa4-9211-cb4954123337 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1079.508724] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48efaecc-497e-47c6-b75e-82ce2202c42d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.519713] env[69994]: DEBUG oslo_vmware.api [None req-0f630158-9960-4aa4-9211-cb4954123337 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1079.519713] env[69994]: value = "task-2926127" [ 1079.519713] env[69994]: _type = "Task" [ 1079.519713] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.530063] env[69994]: DEBUG oslo_vmware.api [None req-0f630158-9960-4aa4-9211-cb4954123337 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926127, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.694258] env[69994]: DEBUG oslo_vmware.api [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926126, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.702916] env[69994]: DEBUG nova.network.neutron [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Successfully created port: cd532a29-1d4e-4026-89d2-9ef034f808a1 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1079.721652] env[69994]: DEBUG nova.network.neutron [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1079.755836] env[69994]: DEBUG nova.network.neutron [-] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.785297] env[69994]: DEBUG nova.compute.manager [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1079.806693] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0dc5e367-bb36-447c-a421-eba3a493d121 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "63e1c67b-6a79-4c09-a835-4ff11e15e981" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.847320] env[69994]: INFO nova.compute.manager [-] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Took 1.56 seconds to deallocate network for instance. [ 1079.887167] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ddaec2-fc8c-28b0-5e71-6ff55c2a3af3, 'name': SearchDatastore_Task, 'duration_secs': 0.010867} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.888108] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.888365] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1079.888584] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.888734] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.888910] env[69994]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1079.889446] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-374bbe3d-b567-40d1-96b8-217ae4248840 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.898436] env[69994]: DEBUG nova.network.neutron [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Updated VIF entry in instance network info cache for port dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1079.898779] env[69994]: DEBUG nova.network.neutron [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Updating instance_info_cache with network_info: [{"id": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "address": "fa:16:3e:96:f0:2c", "network": {"id": "ca055ef0-8a45-4457-a25c-226ccd592aa9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472062423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0af2d3f09d264d4c9bba8747f74383bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcfa2bab-5c", "ovs_interfaceid": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.903071] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1079.903071] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1079.904088] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a79c271-5e92-49aa-9e5a-4681cca06151 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.918110] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1079.918110] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cdbe65-44ff-2de2-8a1e-96c0356ebae0" [ 1079.918110] env[69994]: _type = "Task" [ 1079.918110] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.929783] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cdbe65-44ff-2de2-8a1e-96c0356ebae0, 'name': SearchDatastore_Task, 'duration_secs': 0.009786} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.930878] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d14ef619-06c8-49ff-a7f7-12f67d7e829e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.938957] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1079.938957] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f2c10e-cfb3-3637-91da-1194cefb9011" [ 1079.938957] env[69994]: _type = "Task" [ 1079.938957] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.947714] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f2c10e-cfb3-3637-91da-1194cefb9011, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.028776] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daaa127a-b7b3-4a4b-80e1-031cc4b9d771 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.034869] env[69994]: DEBUG oslo_vmware.api [None req-0f630158-9960-4aa4-9211-cb4954123337 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926127, 'name': PowerOffVM_Task, 'duration_secs': 0.22401} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.035543] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f630158-9960-4aa4-9211-cb4954123337 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1080.035772] env[69994]: DEBUG nova.compute.manager [None req-0f630158-9960-4aa4-9211-cb4954123337 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1080.036636] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae76486-051b-48f3-a416-e944b17b69af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.041836] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffab1f2-3ca4-4519-ac0b-4d3dfb5d84f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.045882] env[69994]: DEBUG nova.network.neutron [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updating instance_info_cache with network_info: [{"id": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "address": "fa:16:3e:dd:e8:98", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda352ba6-e5", "ovs_interfaceid": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.088352] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0350a3-e8e0-426f-beaf-3f3a3110678a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.096838] env[69994]: DEBUG nova.network.neutron [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updated VIF entry in instance network info cache for port dda9491b-4aa9-4a86-9da0-a1f044e25c0e. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1080.097338] env[69994]: DEBUG nova.network.neutron [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updating instance_info_cache with network_info: [{"id": "cf663439-5f58-4ebe-9323-5937dcc425a7", "address": "fa:16:3e:0e:91:27", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf663439-5f", "ovs_interfaceid": "cf663439-5f58-4ebe-9323-5937dcc425a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "980fbc36-1a58-4992-a66c-ec31e2a90b67", "address": "fa:16:3e:a6:cb:86", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap980fbc36-1a", "ovs_interfaceid": "980fbc36-1a58-4992-a66c-ec31e2a90b67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dda9491b-4aa9-4a86-9da0-a1f044e25c0e", "address": "fa:16:3e:b8:bf:02", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdda9491b-4a", "ovs_interfaceid": "dda9491b-4aa9-4a86-9da0-a1f044e25c0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.099506] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37a8ad7-ef41-4ab7-8576-278a4f7cf487 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.114329] env[69994]: DEBUG nova.compute.provider_tree [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.193879] env[69994]: DEBUG oslo_vmware.api [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926126, 'name': ReconfigVM_Task, 'duration_secs': 0.640596} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.194182] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.195033] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Reconfigured VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1080.258894] env[69994]: INFO nova.compute.manager [-] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Took 1.20 seconds to deallocate network for instance. 
[ 1080.353663] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.401751] env[69994]: DEBUG oslo_concurrency.lockutils [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] Releasing lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.402006] env[69994]: DEBUG nova.compute.manager [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Received event network-vif-unplugged-2850c5e6-0790-4289-aab5-45fca743e84f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1080.402246] env[69994]: DEBUG oslo_concurrency.lockutils [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] Acquiring lock "63e1c67b-6a79-4c09-a835-4ff11e15e981-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.402485] env[69994]: DEBUG oslo_concurrency.lockutils [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] Lock "63e1c67b-6a79-4c09-a835-4ff11e15e981-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.402657] env[69994]: DEBUG oslo_concurrency.lockutils [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] Lock "63e1c67b-6a79-4c09-a835-4ff11e15e981-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.402827] env[69994]: DEBUG nova.compute.manager [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] No waiting events found dispatching network-vif-unplugged-2850c5e6-0790-4289-aab5-45fca743e84f {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1080.403024] env[69994]: WARNING nova.compute.manager [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Received unexpected event network-vif-unplugged-2850c5e6-0790-4289-aab5-45fca743e84f for instance with vm_state shelved_offloaded and task_state None. 
[ 1080.403213] env[69994]: DEBUG nova.compute.manager [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Received event network-changed-2850c5e6-0790-4289-aab5-45fca743e84f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1080.403395] env[69994]: DEBUG nova.compute.manager [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Refreshing instance network info cache due to event network-changed-2850c5e6-0790-4289-aab5-45fca743e84f. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1080.403598] env[69994]: DEBUG oslo_concurrency.lockutils [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] Acquiring lock "refresh_cache-63e1c67b-6a79-4c09-a835-4ff11e15e981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.403736] env[69994]: DEBUG oslo_concurrency.lockutils [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] Acquired lock "refresh_cache-63e1c67b-6a79-4c09-a835-4ff11e15e981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1080.403893] env[69994]: DEBUG nova.network.neutron [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Refreshing network info cache for port 2850c5e6-0790-4289-aab5-45fca743e84f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1080.448778] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f2c10e-cfb3-3637-91da-1194cefb9011, 'name': SearchDatastore_Task, 'duration_secs': 0.010271} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.449061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.449331] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 85293c91-f363-4085-9eb8-2bf6514fa2f1/85293c91-f363-4085-9eb8-2bf6514fa2f1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1080.449645] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d6557b1-b498-49ee-81fe-a810f468e6ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.457390] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1080.457390] env[69994]: value = "task-2926128" [ 1080.457390] env[69994]: _type = "Task" [ 1080.457390] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.465317] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926128, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.551885] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.551885] env[69994]: DEBUG nova.compute.manager [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Instance network_info: |[{"id": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "address": "fa:16:3e:dd:e8:98", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda352ba6-e5", "ovs_interfaceid": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1080.552291] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:e8:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52c1f5eb-3d4a-4faa-a30d-2b0a46430791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da352ba6-e52b-4b13-8514-5db1e4d826ee', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1080.559933] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating folder: Project (1a10b55bcc104c108604d402ec6d09ce). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1080.564456] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ce190eb-ff25-49d9-be9d-5ee4cad6d8b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.564456] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0f630158-9960-4aa4-9211-cb4954123337 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.077s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.577016] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Created folder: Project (1a10b55bcc104c108604d402ec6d09ce) in parent group-v587342. [ 1080.577256] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating folder: Instances. Parent ref: group-v587607. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1080.578282] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98f59f20-b04f-4fe6-939f-27de42fd08c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.590994] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Created folder: Instances in parent group-v587607. [ 1080.591317] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1080.591577] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1080.591820] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ce33d55-d7df-487c-bdc5-e4bfd6f4434a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.611441] env[69994]: DEBUG oslo_concurrency.lockutils [req-d12639bb-1ac5-4ce4-ba9c-38149b898822 req-b45da720-f0b4-4ad6-b855-7b3913459fbb service nova] Releasing lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.617228] env[69994]: DEBUG nova.scheduler.client.report [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1080.622571] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1080.622571] env[69994]: value = "task-2926131" [ 1080.622571] env[69994]: _type = "Task" [ 1080.622571] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.639472] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29f7d72a-1e5d-4652-aab6-9ef90183c552 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "f0b77732-aae1-4790-a2c7-75586e78eda6" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.639472] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29f7d72a-1e5d-4652-aab6-9ef90183c552 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.639737] env[69994]: DEBUG nova.compute.manager [None req-29f7d72a-1e5d-4652-aab6-9ef90183c552 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1080.641764] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926131, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.641764] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c214e8-cf0f-4219-9085-057681057ca7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.649906] env[69994]: DEBUG nova.compute.manager [None req-29f7d72a-1e5d-4652-aab6-9ef90183c552 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1080.649906] env[69994]: DEBUG nova.objects.instance [None req-29f7d72a-1e5d-4652-aab6-9ef90183c552 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'flavor' on Instance uuid f0b77732-aae1-4790-a2c7-75586e78eda6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1080.698779] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c8cee4e7-d8dc-45fb-8009-d9cdfa9b151b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-d5af7ae1-d68e-4170-b762-e56d7f2551d7-dda9491b-4aa9-4a86-9da0-a1f044e25c0e" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.357s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.765046] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.797730] env[69994]: DEBUG nova.compute.manager [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1080.831688] env[69994]: DEBUG nova.virt.hardware [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1080.831803] env[69994]: DEBUG nova.virt.hardware [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1080.831993] env[69994]: DEBUG nova.virt.hardware [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1080.832324] env[69994]: DEBUG nova.virt.hardware [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1080.832552] env[69994]: DEBUG nova.virt.hardware [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1080.832785] env[69994]: DEBUG nova.virt.hardware [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1080.833157] env[69994]: DEBUG nova.virt.hardware [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1080.833447] env[69994]: DEBUG nova.virt.hardware [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1080.833670] env[69994]: DEBUG 
nova.virt.hardware [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1080.833938] env[69994]: DEBUG nova.virt.hardware [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1080.834277] env[69994]: DEBUG nova.virt.hardware [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1080.836083] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee99db46-8e68-408b-bc57-f667e5b97048 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.847696] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8d5e5f-8bfc-4627-ae98-9c977bcfb095 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.971021] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926128, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505725} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.971021] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 85293c91-f363-4085-9eb8-2bf6514fa2f1/85293c91-f363-4085-9eb8-2bf6514fa2f1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1080.971021] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1080.971021] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9cdfe45-b483-4b93-9cb3-3f7cd12ec1d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.977950] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1080.977950] env[69994]: value = "task-2926132" [ 1080.977950] env[69994]: _type = "Task" [ 1080.977950] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.988406] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926132, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.124199] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.124835] env[69994]: DEBUG nova.compute.manager [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1081.128469] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.174s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.129515] env[69994]: INFO nova.compute.claims [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1081.143079] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926131, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.269359] env[69994]: DEBUG nova.objects.instance [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'flavor' on Instance uuid 68eba44a-0989-47dc-a88b-102d9aa34c5d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.319726] env[69994]: DEBUG nova.network.neutron [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Successfully updated port: cd532a29-1d4e-4026-89d2-9ef034f808a1 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1081.365833] env[69994]: DEBUG nova.network.neutron [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Updated VIF entry in instance network info cache for port 2850c5e6-0790-4289-aab5-45fca743e84f. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1081.365833] env[69994]: DEBUG nova.network.neutron [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Updating instance_info_cache with network_info: [{"id": "2850c5e6-0790-4289-aab5-45fca743e84f", "address": "fa:16:3e:14:e2:4a", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": null, "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap2850c5e6-07", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.376113] env[69994]: DEBUG nova.compute.manager [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Received event network-vif-plugged-da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1081.376113] env[69994]: DEBUG oslo_concurrency.lockutils [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] Acquiring lock "3c374550-d65b-494a-89d7-60720f6b44dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.376113] env[69994]: DEBUG oslo_concurrency.lockutils [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] Lock "3c374550-d65b-494a-89d7-60720f6b44dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.376113] env[69994]: DEBUG oslo_concurrency.lockutils [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] Lock "3c374550-d65b-494a-89d7-60720f6b44dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.376113] env[69994]: DEBUG nova.compute.manager [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] No waiting events found dispatching network-vif-plugged-da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1081.376113] env[69994]: WARNING nova.compute.manager [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Received unexpected event 
network-vif-plugged-da352ba6-e52b-4b13-8514-5db1e4d826ee for instance with vm_state building and task_state spawning. [ 1081.376713] env[69994]: DEBUG nova.compute.manager [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Received event network-changed-da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1081.376987] env[69994]: DEBUG nova.compute.manager [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Refreshing instance network info cache due to event network-changed-da352ba6-e52b-4b13-8514-5db1e4d826ee. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1081.377298] env[69994]: DEBUG oslo_concurrency.lockutils [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] Acquiring lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.377537] env[69994]: DEBUG oslo_concurrency.lockutils [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] Acquired lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.377787] env[69994]: DEBUG nova.network.neutron [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Refreshing network info cache for port da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1081.488817] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926132, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067462} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.489282] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1081.492174] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3caf061e-e35a-4091-9b1b-2dd3e48b4f66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.516258] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 85293c91-f363-4085-9eb8-2bf6514fa2f1/85293c91-f363-4085-9eb8-2bf6514fa2f1.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1081.516968] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-581701e2-e962-4ad7-8ac3-980a93131b7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.538304] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1081.538304] env[69994]: value = "task-2926133" [ 1081.538304] env[69994]: _type = "Task" [ 1081.538304] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.549053] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926133, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.644163] env[69994]: DEBUG nova.compute.utils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1081.645593] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926131, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.646743] env[69994]: DEBUG nova.compute.manager [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1081.646938] env[69994]: DEBUG nova.network.neutron [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1081.657039] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-29f7d72a-1e5d-4652-aab6-9ef90183c552 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1081.657553] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8164412f-d310-4606-918d-cb8f9ad8b5d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.666018] env[69994]: DEBUG oslo_vmware.api [None req-29f7d72a-1e5d-4652-aab6-9ef90183c552 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1081.666018] env[69994]: value = "task-2926134" [ 1081.666018] env[69994]: _type = "Task" [ 1081.666018] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.677815] env[69994]: DEBUG oslo_vmware.api [None req-29f7d72a-1e5d-4652-aab6-9ef90183c552 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926134, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.754812] env[69994]: DEBUG nova.policy [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4543702642614e079383389379629d8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0bbe936f4d284e73999846251269fefd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1081.780412] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.780606] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquired lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.780782] env[69994]: DEBUG nova.network.neutron [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1081.780965] env[69994]: DEBUG nova.objects.instance [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'info_cache' on Instance uuid 68eba44a-0989-47dc-a88b-102d9aa34c5d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.822043] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.822204] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.823848] env[69994]: DEBUG nova.network.neutron [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1081.868564] env[69994]: DEBUG oslo_concurrency.lockutils [req-8ca4682f-4e7c-4537-99b5-4fee0d71a9e3 
req-4248f8f4-c118-4ecf-9372-6149f4b12272 service nova] Releasing lock "refresh_cache-63e1c67b-6a79-4c09-a835-4ff11e15e981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.051514] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926133, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.138872] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926131, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.139925] env[69994]: DEBUG nova.network.neutron [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updated VIF entry in instance network info cache for port da352ba6-e52b-4b13-8514-5db1e4d826ee. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1082.140313] env[69994]: DEBUG nova.network.neutron [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updating instance_info_cache with network_info: [{"id": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "address": "fa:16:3e:dd:e8:98", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda352ba6-e5", "ovs_interfaceid": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.151256] env[69994]: DEBUG nova.compute.manager [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1082.178227] env[69994]: DEBUG oslo_vmware.api [None req-29f7d72a-1e5d-4652-aab6-9ef90183c552 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926134, 'name': PowerOffVM_Task, 'duration_secs': 0.222659} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.178227] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-29f7d72a-1e5d-4652-aab6-9ef90183c552 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1082.178476] env[69994]: DEBUG nova.compute.manager [None req-29f7d72a-1e5d-4652-aab6-9ef90183c552 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1082.181925] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2ad14f-5bae-4b2f-949b-5484fee34414 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.284509] env[69994]: DEBUG nova.objects.base [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Object Instance<68eba44a-0989-47dc-a88b-102d9aa34c5d> lazy-loaded attributes: flavor,info_cache {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1082.314686] env[69994]: DEBUG nova.network.neutron [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Successfully created port: 858e65bf-8ce8-45e6-878b-36f3c884077d {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1082.359135] env[69994]: DEBUG nova.network.neutron [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1082.400215] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bace4bc-4a4e-4438-a834-d2e3d0a32599 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.408355] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936730d8-c2e0-4ed0-893a-1ae6029a819f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.443447] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "interface-d5af7ae1-d68e-4170-b762-e56d7f2551d7-980fbc36-1a58-4992-a66c-ec31e2a90b67" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.443447] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-d5af7ae1-d68e-4170-b762-e56d7f2551d7-980fbc36-1a58-4992-a66c-ec31e2a90b67" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.448060] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086782d2-8dec-41df-bbb7-82f74f2327aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.458849] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e6347f-208b-4630-aedb-47ad7f067c85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.473404] env[69994]: DEBUG nova.compute.provider_tree [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.516774] env[69994]: DEBUG nova.network.neutron [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating instance_info_cache with network_info: [{"id": "cd532a29-1d4e-4026-89d2-9ef034f808a1", "address": "fa:16:3e:0f:cb:2f", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd532a29-1d", "ovs_interfaceid": "cd532a29-1d4e-4026-89d2-9ef034f808a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.551023] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926133, 'name': ReconfigVM_Task, 'duration_secs': 0.854537} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.552753] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 85293c91-f363-4085-9eb8-2bf6514fa2f1/85293c91-f363-4085-9eb8-2bf6514fa2f1.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1082.553435] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9fe9c7ea-14ae-4608-b2ab-02dbb7e90443 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.560880] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1082.560880] env[69994]: value = "task-2926135" [ 1082.560880] env[69994]: _type = "Task" [ 1082.560880] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.571399] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926135, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.639115] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926131, 'name': CreateVM_Task, 'duration_secs': 1.575753} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.639717] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1082.640365] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.640616] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.640982] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1082.641320] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f64d6056-4ed5-4ec6-9188-44f70cec1174 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.643422] env[69994]: DEBUG oslo_concurrency.lockutils [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] Releasing lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.643674] env[69994]: DEBUG nova.compute.manager [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Received event network-vif-deleted-dea09a3f-a839-4d7e-aa69-37bee8855d79 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1082.643881] env[69994]: DEBUG nova.compute.manager [req-a67aaa14-dba8-488c-a62b-1fd833eb476a req-94b5e66b-7973-4b66-b251-4c5fd816df90 service nova] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Received event network-vif-deleted-200e0161-2f5b-4939-90ae-3eb3457ffac7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1082.647843] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1082.647843] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a63f0f-ed9b-fd1a-d4d9-81581ef07e54" [ 1082.647843] env[69994]: _type = "Task" [ 1082.647843] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.663186] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a63f0f-ed9b-fd1a-d4d9-81581ef07e54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.696252] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29f7d72a-1e5d-4652-aab6-9ef90183c552 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.057s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.946262] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.946262] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.946512] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b5d902-162d-4f82-bac6-afb09460dbce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.967986] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4bd7a40-7222-4ec2-bfdc-107aa1bc8796 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.976775] env[69994]: DEBUG nova.scheduler.client.report [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1083.008176] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Reconfiguring VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1083.011690] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-a3cca1c4-052b-43ca-9d7c-757c47f45c05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.025387] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.025702] env[69994]: DEBUG nova.compute.manager [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Instance network_info: |[{"id": "cd532a29-1d4e-4026-89d2-9ef034f808a1", "address": "fa:16:3e:0f:cb:2f", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd532a29-1d", "ovs_interfaceid": "cd532a29-1d4e-4026-89d2-9ef034f808a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1083.026442] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:cb:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd532a29-1d4e-4026-89d2-9ef034f808a1', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.034016] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.035432] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.035655] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b35e62ac-d809-4ec2-a601-5bc87a523dfe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.052408] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1083.052408] env[69994]: value = "task-2926136" [ 1083.052408] env[69994]: _type = "Task" [ 1083.052408] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.058775] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.058775] env[69994]: value = "task-2926137" [ 1083.058775] env[69994]: _type = "Task" [ 1083.058775] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.070895] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926136, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.078939] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926137, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.082354] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926135, 'name': Rename_Task, 'duration_secs': 0.151507} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.082781] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1083.082843] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-470459f9-d3a2-4969-a9f4-0736e2ee1150 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.091123] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1083.091123] env[69994]: value = "task-2926138" [ 1083.091123] env[69994]: _type = "Task" [ 1083.091123] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.100777] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926138, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.129635] env[69994]: DEBUG nova.network.neutron [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Updating instance_info_cache with network_info: [{"id": "0c8c3a9b-a328-44f8-81e2-5a480901ac9f", "address": "fa:16:3e:93:51:57", "network": {"id": "75f691f8-2853-4a39-bfdb-081341871a53", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1828741811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e63c47302d14d849b239a91580a25ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c8c3a9b-a3", "ovs_interfaceid": "0c8c3a9b-a328-44f8-81e2-5a480901ac9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.164146] env[69994]: DEBUG nova.compute.manager [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1083.167726] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a63f0f-ed9b-fd1a-d4d9-81581ef07e54, 'name': SearchDatastore_Task, 'duration_secs': 0.010996} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.167726] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.167726] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1083.167891] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.169845] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.169845] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1083.169845] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ca93892-4d4e-4701-be4a-b93476cb5519 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.184176] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1083.184501] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1083.185421] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fbdd3d6-879a-401d-9602-02a051a7b7c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.194234] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1083.194234] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5284f3d7-f9e3-6176-9898-026b2a003218" [ 1083.194234] env[69994]: _type = "Task" [ 1083.194234] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.206189] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5284f3d7-f9e3-6176-9898-026b2a003218, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.210660] env[69994]: DEBUG nova.virt.hardware [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1083.210955] env[69994]: DEBUG nova.virt.hardware [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1083.211136] env[69994]: DEBUG nova.virt.hardware [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1083.211327] env[69994]: DEBUG nova.virt.hardware [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1083.211474] env[69994]: DEBUG nova.virt.hardware [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1083.212074] env[69994]: DEBUG nova.virt.hardware [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1083.212074] env[69994]: DEBUG nova.virt.hardware [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1083.212074] env[69994]: DEBUG nova.virt.hardware [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1083.212192] env[69994]: DEBUG nova.virt.hardware [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1083.212314] env[69994]: DEBUG nova.virt.hardware [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1083.212504] env[69994]: DEBUG nova.virt.hardware [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1083.213408] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2810ac1b-907a-4006-8a20-edcd438aa33e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.225370] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c429d13-eebd-4543-be2d-bf388697c643 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.406769] env[69994]: DEBUG nova.compute.manager [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service nova] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Received event network-vif-plugged-cd532a29-1d4e-4026-89d2-9ef034f808a1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1083.406990] env[69994]: DEBUG oslo_concurrency.lockutils [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service nova] Acquiring lock "c7c17fab-71a4-44df-907e-f7b408f80236-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.407370] env[69994]: DEBUG oslo_concurrency.lockutils [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service 
nova] Lock "c7c17fab-71a4-44df-907e-f7b408f80236-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.407370] env[69994]: DEBUG oslo_concurrency.lockutils [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service nova] Lock "c7c17fab-71a4-44df-907e-f7b408f80236-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.407526] env[69994]: DEBUG nova.compute.manager [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service nova] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] No waiting events found dispatching network-vif-plugged-cd532a29-1d4e-4026-89d2-9ef034f808a1 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1083.407696] env[69994]: WARNING nova.compute.manager [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service nova] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Received unexpected event network-vif-plugged-cd532a29-1d4e-4026-89d2-9ef034f808a1 for instance with vm_state building and task_state spawning. [ 1083.407854] env[69994]: DEBUG nova.compute.manager [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service nova] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Received event network-changed-cd532a29-1d4e-4026-89d2-9ef034f808a1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1083.408423] env[69994]: DEBUG nova.compute.manager [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service nova] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Refreshing instance network info cache due to event network-changed-cd532a29-1d4e-4026-89d2-9ef034f808a1. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1083.408622] env[69994]: DEBUG oslo_concurrency.lockutils [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service nova] Acquiring lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.408776] env[69994]: DEBUG oslo_concurrency.lockutils [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service nova] Acquired lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.408941] env[69994]: DEBUG nova.network.neutron [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service nova] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Refreshing network info cache for port cd532a29-1d4e-4026-89d2-9ef034f808a1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1083.503807] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.376s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.504360] env[69994]: DEBUG nova.compute.manager [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1083.507117] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 6.873s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.507292] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.507447] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1083.507728] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.157s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.507927] env[69994]: DEBUG nova.objects.instance [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lazy-loading 'resources' on Instance uuid 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1083.510946] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfb6bf0-a974-4c31-9300-f69804ae3e4d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.521158] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ac585c-c6cb-44bd-94c6-9eaf3794abdc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.539588] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7faad4f1-2a4b-462a-a399-50ad7800def0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.550243] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3abd1eae-6cb5-41f2-ab32-9d202b633705 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.590738] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926136, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.591517] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178907MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1083.591655] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.604539] env[69994]: DEBUG oslo_vmware.api [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926138, 'name': PowerOnVM_Task, 'duration_secs': 0.496808} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.607705] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1083.607922] env[69994]: INFO nova.compute.manager [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Took 8.56 seconds to spawn the instance on the hypervisor. [ 1083.608167] env[69994]: DEBUG nova.compute.manager [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1083.608368] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926137, 'name': CreateVM_Task, 'duration_secs': 0.360424} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.609210] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d908ee74-1ad1-460f-9c43-c5b0184941fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.611730] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1083.612392] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.612576] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.612906] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1083.613541] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aa52321-a613-49bf-9458-2c2811a9288f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.622381] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1083.622381] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529de403-e641-2366-8a58-592ed0789c89" [ 1083.622381] env[69994]: _type = "Task" [ 1083.622381] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.637050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Releasing lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.638612] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529de403-e641-2366-8a58-592ed0789c89, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.706948] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5284f3d7-f9e3-6176-9898-026b2a003218, 'name': SearchDatastore_Task, 'duration_secs': 0.012814} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.707804] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b7d0c87-9a1b-4920-8a12-9f9176384e8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.716797] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1083.716797] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f74e68-4673-1052-c079-5a9e29b193a1" [ 1083.716797] env[69994]: _type = "Task" [ 1083.716797] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.727323] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f74e68-4673-1052-c079-5a9e29b193a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.018483] env[69994]: DEBUG nova.compute.utils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1084.021698] env[69994]: DEBUG nova.compute.manager [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1084.021698] env[69994]: DEBUG nova.network.neutron [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1084.069438] env[69994]: DEBUG nova.policy [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b75c933cf4814107af5a169a269d51a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6df35f7e7a843648cacb18fc8dde527', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1084.075905] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926136, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.081019] env[69994]: DEBUG nova.compute.manager [req-a8e97598-4e27-432d-867a-3ca014f43ff8 req-a59953da-06d9-4b9f-a76a-be266b178dd5 service nova] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Received event network-vif-plugged-858e65bf-8ce8-45e6-878b-36f3c884077d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1084.082326] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8e97598-4e27-432d-867a-3ca014f43ff8 req-a59953da-06d9-4b9f-a76a-be266b178dd5 service nova] Acquiring lock "a7dd4e89-a953-49b4-b56f-fdacef3a621b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.082584] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8e97598-4e27-432d-867a-3ca014f43ff8 req-a59953da-06d9-4b9f-a76a-be266b178dd5 service nova] Lock "a7dd4e89-a953-49b4-b56f-fdacef3a621b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.082708] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8e97598-4e27-432d-867a-3ca014f43ff8 req-a59953da-06d9-4b9f-a76a-be266b178dd5 service nova] Lock "a7dd4e89-a953-49b4-b56f-fdacef3a621b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.082877] env[69994]: DEBUG nova.compute.manager [req-a8e97598-4e27-432d-867a-3ca014f43ff8 req-a59953da-06d9-4b9f-a76a-be266b178dd5 service nova] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] No waiting events found dispatching network-vif-plugged-858e65bf-8ce8-45e6-878b-36f3c884077d {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1084.083053] env[69994]: WARNING 
nova.compute.manager [req-a8e97598-4e27-432d-867a-3ca014f43ff8 req-a59953da-06d9-4b9f-a76a-be266b178dd5 service nova] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Received unexpected event network-vif-plugged-858e65bf-8ce8-45e6-878b-36f3c884077d for instance with vm_state building and task_state spawning. [ 1084.134738] env[69994]: INFO nova.compute.manager [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Took 17.19 seconds to build instance. [ 1084.145162] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529de403-e641-2366-8a58-592ed0789c89, 'name': SearchDatastore_Task, 'duration_secs': 0.022263} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.153768] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.153768] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.153768] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.230844] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f74e68-4673-1052-c079-5a9e29b193a1, 'name': SearchDatastore_Task, 'duration_secs': 0.011601} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.233924] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.234218] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 3c374550-d65b-494a-89d7-60720f6b44dc/3c374550-d65b-494a-89d7-60720f6b44dc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1084.235405] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.235405] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.235405] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee958648-2cb6-4fd5-bf4c-291d0f818e64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.239496] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e705095-d3e4-49f7-88e5-693cbd78847b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.249184] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1084.249184] env[69994]: value = "task-2926139" [ 1084.249184] env[69994]: _type = "Task" [ 1084.249184] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.249782] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.249954] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1084.254018] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6c9b661-ca04-44b8-80a9-b03c079e7cb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.265353] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926139, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.267041] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1084.267041] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52763681-2254-57c9-6f8c-e61b0a9aa3f6" [ 1084.267041] env[69994]: _type = "Task" [ 1084.267041] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.277530] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52763681-2254-57c9-6f8c-e61b0a9aa3f6, 'name': SearchDatastore_Task, 'duration_secs': 0.011772} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.281114] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb281506-6b25-4748-9c31-64701edcfb95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.287251] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1084.287251] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dbd947-8954-c791-e8b3-6dd7d1bcff8b" [ 1084.287251] env[69994]: _type = "Task" [ 1084.287251] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.293178] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61f3b95-bcff-448d-9f1f-6494126a53c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.299796] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dbd947-8954-c791-e8b3-6dd7d1bcff8b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.305390] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e7245e-0f45-4334-87b4-10e568b17a5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.310840] env[69994]: DEBUG nova.network.neutron [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service nova] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updated VIF entry in instance network info cache for port cd532a29-1d4e-4026-89d2-9ef034f808a1. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1084.311088] env[69994]: DEBUG nova.network.neutron [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service nova] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating instance_info_cache with network_info: [{"id": "cd532a29-1d4e-4026-89d2-9ef034f808a1", "address": "fa:16:3e:0f:cb:2f", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd532a29-1d", "ovs_interfaceid": "cd532a29-1d4e-4026-89d2-9ef034f808a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.316450] env[69994]: DEBUG nova.network.neutron [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Successfully updated port: 858e65bf-8ce8-45e6-878b-36f3c884077d {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1084.316450] env[69994]: DEBUG nova.compute.manager [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Stashing vm_state: stopped {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1084.352623] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fc3ce3-7409-49e6-937f-720754394b1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.362544] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f39d23-65ca-4c88-a2b6-30a6e6341eea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
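[Editor's note] The entries above repeat two patterns worth measuring when reading this log: oslo_vmware task polling that ends in "'duration_secs': N} completed successfully", and oslo_concurrency lock messages that end in "released ... held N.NNNs". The snippet below is a minimal, illustrative helper for pulling those numbers out of a log like this one; it is not part of Nova or the oslo libraries, the regexes are keyed only to the message formats visible in this excerpt, and the 1.0 s "slow lock" threshold is an arbitrary choice for illustration.

```python
#!/usr/bin/env python3
"""Summarize vCenter task durations and lock hold times from a nova-compute log.

Illustrative only: the patterns match the oslo_vmware `_poll_task` completion
messages and oslo_concurrency `lockutils` release messages as they appear in
the excerpt above.
"""
import re
import sys
from collections import defaultdict

# e.g. "... 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502381} completed successfully."
TASK_RE = re.compile(r"'name': (\w+), 'duration_secs': ([\d.]+)\} completed successfully")
# e.g. 'Lock "compute_resources" "released" by "..." :: held 2.376s'
LOCK_RE = re.compile(r'Lock "([^"]+)" "released" by "[^"]+" :: held ([\d.]+)s')

SLOW_LOCK_SECONDS = 1.0  # arbitrary illustration threshold


def summarize(log_text: str) -> None:
    """Print per-task duration stats and any lock held longer than the threshold."""
    durations = defaultdict(list)
    for name, secs in TASK_RE.findall(log_text):
        durations[name].append(float(secs))
    for name, samples in sorted(durations.items()):
        print(f"{name}: n={len(samples)} max={max(samples):.3f}s "
              f"avg={sum(samples) / len(samples):.3f}s")

    slow = [(name, float(secs)) for name, secs in LOCK_RE.findall(log_text)
            if float(secs) > SLOW_LOCK_SECONDS]
    for name, secs in sorted(slow, key=lambda item: -item[1]):
        print(f"lock held {secs:.3f}s: {name}")


if __name__ == "__main__":
    # Usage (hypothetical file name): python3 summarize_log.py nova-compute.log
    summarize(open(sys.argv[1], errors="replace").read())
```

Run against this excerpt it would report, for example, CopyVirtualDisk_Task and CreateVM_Task durations and the 2.057 s / 2.376 s lock holds seen above, which is usually enough to tell slow vCenter round-trips apart from contention inside the compute service.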
[ 1084.383656] env[69994]: DEBUG nova.compute.provider_tree [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.524349] env[69994]: DEBUG nova.compute.manager [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1084.568143] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926136, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.637361] env[69994]: DEBUG nova.network.neutron [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Successfully created port: a40388ce-c3c1-480f-8e1e-160c56294eab {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1084.640586] env[69994]: DEBUG oslo_concurrency.lockutils [None req-56d8c571-8871-4d2b-8123-ca9e4b0b1759 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.714s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.658920] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1084.659316] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c332728-dfcf-4460-98b5-b6ec5c347ee8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.668790] env[69994]: DEBUG oslo_vmware.api [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1084.668790] env[69994]: value = "task-2926140" [ 1084.668790] env[69994]: _type = "Task" [ 1084.668790] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.678745] env[69994]: DEBUG oslo_vmware.api [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926140, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.761766] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926139, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502381} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.762073] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 3c374550-d65b-494a-89d7-60720f6b44dc/3c374550-d65b-494a-89d7-60720f6b44dc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1084.762344] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1084.765701] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c0b69af-d75d-4ea3-a053-499d35bfac1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.771183] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1084.771183] env[69994]: value = "task-2926141" [ 1084.771183] env[69994]: _type = "Task" [ 1084.771183] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.780385] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926141, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.798750] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dbd947-8954-c791-e8b3-6dd7d1bcff8b, 'name': SearchDatastore_Task, 'duration_secs': 0.009631} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.799201] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.799674] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] c7c17fab-71a4-44df-907e-f7b408f80236/c7c17fab-71a4-44df-907e-f7b408f80236.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1084.800061] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-011ca140-72dd-461a-aa18-c8b0ce4b7560 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.808991] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1084.808991] env[69994]: value = "task-2926142" [ 1084.808991] env[69994]: _type = "Task" [ 1084.808991] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.819431] env[69994]: DEBUG oslo_concurrency.lockutils [req-02e7a01e-7a0a-4963-b7c9-c3a1c42dca55 req-54a634c8-b9bf-4458-853a-4081c40f976f service nova] Releasing lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.819847] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926142, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.848646] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "refresh_cache-a7dd4e89-a953-49b4-b56f-fdacef3a621b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.848646] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "refresh_cache-a7dd4e89-a953-49b4-b56f-fdacef3a621b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.848934] env[69994]: DEBUG nova.network.neutron [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1084.878026] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.889159] env[69994]: DEBUG nova.scheduler.client.report [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1085.068214] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926136, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.188440] env[69994]: DEBUG oslo_vmware.api [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926140, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.287193] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926141, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098987} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.287193] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1085.287193] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867d211f-10e9-448d-bfe2-7870ab7e6161 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.313571] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 3c374550-d65b-494a-89d7-60720f6b44dc/3c374550-d65b-494a-89d7-60720f6b44dc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1085.313960] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d03d4121-eb9c-4047-9c67-0d1b26916ec9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.342620] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926142, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49385} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.346253] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] c7c17fab-71a4-44df-907e-f7b408f80236/c7c17fab-71a4-44df-907e-f7b408f80236.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1085.346253] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1085.346253] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1085.346253] env[69994]: value = "task-2926143" [ 1085.346253] env[69994]: _type = "Task" [ 1085.346253] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.346253] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d09733a9-3dfa-4dd3-a8ae-d13a641e610d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.362226] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926143, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.363732] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1085.363732] env[69994]: value = "task-2926144" [ 1085.363732] env[69994]: _type = "Task" [ 1085.363732] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.373762] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "5b73cd44-6c89-4e12-9195-16b5172cbf2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.374090] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "5b73cd44-6c89-4e12-9195-16b5172cbf2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.379678] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926144, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.393544] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.886s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.396992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.369s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.396992] env[69994]: DEBUG nova.objects.instance [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lazy-loading 'resources' on Instance uuid 63e1c67b-6a79-4c09-a835-4ff11e15e981 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.402442] env[69994]: DEBUG nova.network.neutron [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1085.422522] env[69994]: INFO nova.scheduler.client.report [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleted allocations for instance 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05 [ 1085.535776] env[69994]: DEBUG nova.compute.manager [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1085.567467] env[69994]: DEBUG nova.virt.hardware [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1085.567723] env[69994]: DEBUG nova.virt.hardware [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1085.567879] env[69994]: DEBUG nova.virt.hardware [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1085.568072] env[69994]: DEBUG nova.virt.hardware [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1085.568225] env[69994]: DEBUG nova.virt.hardware [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1085.568373] env[69994]: DEBUG nova.virt.hardware [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1085.568581] env[69994]: DEBUG nova.virt.hardware [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1085.568748] env[69994]: DEBUG nova.virt.hardware [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1085.568916] env[69994]: DEBUG nova.virt.hardware [None 
req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1085.569094] env[69994]: DEBUG nova.virt.hardware [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1085.569274] env[69994]: DEBUG nova.virt.hardware [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1085.570127] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ad8997-b214-473b-abe2-8bcf48b500fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.574352] env[69994]: DEBUG nova.network.neutron [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Updating instance_info_cache with network_info: [{"id": "858e65bf-8ce8-45e6-878b-36f3c884077d", "address": "fa:16:3e:0f:62:c1", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap858e65bf-8c", "ovs_interfaceid": "858e65bf-8ce8-45e6-878b-36f3c884077d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.579467] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926136, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.585892] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eedbced-27da-468f-8ec7-9852afe9129d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.682070] env[69994]: DEBUG oslo_vmware.api [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926140, 'name': PowerOnVM_Task, 'duration_secs': 0.600868} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.682542] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1085.682628] env[69994]: DEBUG nova.compute.manager [None req-e118f740-8dee-4d31-9372-3c77b4d44aee tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1085.683368] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571d94e7-6a7c-40a9-bc86-4c9206f0b705 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.858097] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926143, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.873815] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926144, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.204163} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.874111] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1085.875184] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abbc826-0deb-44f5-bbde-cdad072b2c95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.880869] env[69994]: DEBUG nova.compute.manager [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1085.901637] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] c7c17fab-71a4-44df-907e-f7b408f80236/c7c17fab-71a4-44df-907e-f7b408f80236.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1085.902645] env[69994]: DEBUG nova.objects.instance [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lazy-loading 'numa_topology' on Instance uuid 63e1c67b-6a79-4c09-a835-4ff11e15e981 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.903572] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd1d370f-1c20-4e45-87a2-10c51ad28e6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.930877] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1085.930877] env[69994]: value = "task-2926145" [ 1085.930877] env[69994]: _type = "Task" [ 1085.930877] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.933924] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8740af06-2951-4b46-a148-5069a1e5a2ae tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "9dbaceb8-fa4d-40c4-9f0e-fa9749663a05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.631s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.945392] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926145, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.074063] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926136, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.080593] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "refresh_cache-a7dd4e89-a953-49b4-b56f-fdacef3a621b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.080938] env[69994]: DEBUG nova.compute.manager [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Instance network_info: |[{"id": "858e65bf-8ce8-45e6-878b-36f3c884077d", "address": "fa:16:3e:0f:62:c1", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap858e65bf-8c", "ovs_interfaceid": "858e65bf-8ce8-45e6-878b-36f3c884077d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1086.081463] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:62:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '858e65bf-8ce8-45e6-878b-36f3c884077d', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1086.091240] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1086.091240] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1086.091240] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ea49449-a13e-4a3f-9da5-76619541caae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.114209] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1086.114209] env[69994]: value = "task-2926146" [ 1086.114209] env[69994]: _type = "Task" [ 1086.114209] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.121861] env[69994]: DEBUG nova.compute.manager [req-0aca0ef8-1216-4661-a7f4-fd667752f09c req-9fb35ca6-903f-40ad-853d-741be922094e service nova] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Received event network-changed-858e65bf-8ce8-45e6-878b-36f3c884077d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1086.121861] env[69994]: DEBUG nova.compute.manager [req-0aca0ef8-1216-4661-a7f4-fd667752f09c req-9fb35ca6-903f-40ad-853d-741be922094e service nova] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Refreshing instance network info cache due to event network-changed-858e65bf-8ce8-45e6-878b-36f3c884077d. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1086.121861] env[69994]: DEBUG oslo_concurrency.lockutils [req-0aca0ef8-1216-4661-a7f4-fd667752f09c req-9fb35ca6-903f-40ad-853d-741be922094e service nova] Acquiring lock "refresh_cache-a7dd4e89-a953-49b4-b56f-fdacef3a621b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.121861] env[69994]: DEBUG oslo_concurrency.lockutils [req-0aca0ef8-1216-4661-a7f4-fd667752f09c req-9fb35ca6-903f-40ad-853d-741be922094e service nova] Acquired lock "refresh_cache-a7dd4e89-a953-49b4-b56f-fdacef3a621b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.121861] env[69994]: DEBUG nova.network.neutron [req-0aca0ef8-1216-4661-a7f4-fd667752f09c req-9fb35ca6-903f-40ad-853d-741be922094e service nova] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Refreshing network info cache for port 858e65bf-8ce8-45e6-878b-36f3c884077d {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1086.130109] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926146, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.365242] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926143, 'name': ReconfigVM_Task, 'duration_secs': 0.945266} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.365841] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 3c374550-d65b-494a-89d7-60720f6b44dc/3c374550-d65b-494a-89d7-60720f6b44dc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1086.367692] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a05e2146-f2d6-42ca-8a49-045c2a8eb163 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.380017] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1086.380017] env[69994]: value = "task-2926147" [ 1086.380017] env[69994]: _type = "Task" [ 1086.380017] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.395452] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926147, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.412163] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.422077] env[69994]: DEBUG nova.objects.base [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Object Instance<63e1c67b-6a79-4c09-a835-4ff11e15e981> lazy-loaded attributes: resources,numa_topology {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1086.442343] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926145, 'name': ReconfigVM_Task, 'duration_secs': 0.30313} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.445125] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Reconfigured VM instance instance-0000005f to attach disk [datastore2] c7c17fab-71a4-44df-907e-f7b408f80236/c7c17fab-71a4-44df-907e-f7b408f80236.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1086.446699] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9491f195-9d3c-47ef-958e-e2c87935c86d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.453707] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1086.453707] env[69994]: value = "task-2926148" [ 1086.453707] env[69994]: _type = "Task" [ 1086.453707] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.469211] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926148, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.574766] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926136, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.630677] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926146, 'name': CreateVM_Task, 'duration_secs': 0.426494} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.633988] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1086.634909] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.635062] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.635399] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1086.636600] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6289b8a-e932-4b09-a94b-78eb144f8384 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.641592] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1086.641592] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52024e56-2478-5a6c-27bc-da2bd3e0f837" [ 1086.641592] env[69994]: _type = "Task" [ 1086.641592] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.654159] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52024e56-2478-5a6c-27bc-da2bd3e0f837, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.721868] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b3718c-09b6-4161-81d6-9396ce9a9f54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.730479] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8a64cc-c698-426e-b73d-085558bd3a71 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.770727] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1dd080-3339-4ce3-b41a-9d83080b31d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.780616] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4e9b56-2e26-425f-b595-7be8becf973e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.796675] env[69994]: DEBUG nova.compute.provider_tree [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1086.801679] env[69994]: DEBUG nova.network.neutron [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Successfully updated port: a40388ce-c3c1-480f-8e1e-160c56294eab {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1086.894146] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926147, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.964386] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926148, 'name': Rename_Task, 'duration_secs': 0.164657} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.964386] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1086.964550] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6cf23f1d-4056-4875-a208-375c876d77ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.972914] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1086.972914] env[69994]: value = "task-2926149" [ 1086.972914] env[69994]: _type = "Task" [ 1086.972914] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.984923] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926149, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.020799] env[69994]: DEBUG nova.network.neutron [req-0aca0ef8-1216-4661-a7f4-fd667752f09c req-9fb35ca6-903f-40ad-853d-741be922094e service nova] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Updated VIF entry in instance network info cache for port 858e65bf-8ce8-45e6-878b-36f3c884077d. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1087.021201] env[69994]: DEBUG nova.network.neutron [req-0aca0ef8-1216-4661-a7f4-fd667752f09c req-9fb35ca6-903f-40ad-853d-741be922094e service nova] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Updating instance_info_cache with network_info: [{"id": "858e65bf-8ce8-45e6-878b-36f3c884077d", "address": "fa:16:3e:0f:62:c1", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap858e65bf-8c", "ovs_interfaceid": "858e65bf-8ce8-45e6-878b-36f3c884077d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.071969] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926136, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.152402] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52024e56-2478-5a6c-27bc-da2bd3e0f837, 'name': SearchDatastore_Task, 'duration_secs': 0.011018} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.152777] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.153033] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1087.153310] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.153464] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.153662] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1087.154093] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1ce1aa8-a13f-40db-875a-b416d3842598 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.164066] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1087.164259] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1087.165367] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92cd7c6b-6465-4e16-928b-6cd4f80fd559 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.171757] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1087.171757] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b23fde-b917-9baa-d7b9-0e9ffb264d42" [ 1087.171757] env[69994]: _type = "Task" [ 1087.171757] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.181125] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b23fde-b917-9baa-d7b9-0e9ffb264d42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.302333] env[69994]: DEBUG nova.scheduler.client.report [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.307148] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Acquiring lock "refresh_cache-489b68f2-c2f2-4710-a06f-45ad8c577441" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.307148] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Acquired lock "refresh_cache-489b68f2-c2f2-4710-a06f-45ad8c577441" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.307263] env[69994]: DEBUG nova.network.neutron [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1087.391288] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926147, 'name': Rename_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.485532] env[69994]: DEBUG oslo_vmware.api [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926149, 'name': PowerOnVM_Task, 'duration_secs': 0.448745} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.486031] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1087.489017] env[69994]: INFO nova.compute.manager [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Took 6.69 seconds to spawn the instance on the hypervisor. [ 1087.489017] env[69994]: DEBUG nova.compute.manager [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1087.489017] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ff4c06-bb2b-4efd-b706-86dd71b8f485 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.497601] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquiring lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.498649] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.524926] env[69994]: DEBUG oslo_concurrency.lockutils [req-0aca0ef8-1216-4661-a7f4-fd667752f09c req-9fb35ca6-903f-40ad-853d-741be922094e service nova] Releasing lock "refresh_cache-a7dd4e89-a953-49b4-b56f-fdacef3a621b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.574830] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926136, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.684853] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b23fde-b917-9baa-d7b9-0e9ffb264d42, 'name': SearchDatastore_Task, 'duration_secs': 0.013938} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.686055] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42675b2f-0fd7-4cc8-ab3d-4e56412d7678 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.693228] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1087.693228] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528c8464-ae75-8a95-9480-918039b48c6a" [ 1087.693228] env[69994]: _type = "Task" [ 1087.693228] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.702508] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528c8464-ae75-8a95-9480-918039b48c6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.809937] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.413s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.816148] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.551s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.817737] env[69994]: INFO nova.compute.claims [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1087.864323] env[69994]: DEBUG nova.network.neutron [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1087.898425] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926147, 'name': Rename_Task, 'duration_secs': 1.236011} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.898842] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1087.899505] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d212315-0f1c-4507-9b66-43f5702099ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.908245] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1087.908245] env[69994]: value = "task-2926150" [ 1087.908245] env[69994]: _type = "Task" [ 1087.908245] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.917041] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926150, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.007032] env[69994]: DEBUG nova.compute.manager [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1088.020971] env[69994]: INFO nova.compute.manager [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Took 13.71 seconds to build instance. [ 1088.072812] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926136, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.083214] env[69994]: DEBUG nova.network.neutron [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Updating instance_info_cache with network_info: [{"id": "a40388ce-c3c1-480f-8e1e-160c56294eab", "address": "fa:16:3e:d4:25:ac", "network": {"id": "8bc3ada1-0296-4b96-9057-67d0d9a1fb35", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-529481842-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6df35f7e7a843648cacb18fc8dde527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa40388ce-c3", "ovs_interfaceid": "a40388ce-c3c1-480f-8e1e-160c56294eab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.205534] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528c8464-ae75-8a95-9480-918039b48c6a, 'name': SearchDatastore_Task, 'duration_secs': 0.014652} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.205801] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.206595] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] a7dd4e89-a953-49b4-b56f-fdacef3a621b/a7dd4e89-a953-49b4-b56f-fdacef3a621b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1088.206595] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-026e3151-48fe-4ae5-ba2b-fa8e9782abfd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.214957] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1088.214957] env[69994]: value = "task-2926151" [ 1088.214957] env[69994]: _type = "Task" [ 1088.214957] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.224687] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926151, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.232191] env[69994]: DEBUG nova.compute.manager [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Received event network-vif-plugged-a40388ce-c3c1-480f-8e1e-160c56294eab {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1088.232191] env[69994]: DEBUG oslo_concurrency.lockutils [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] Acquiring lock "489b68f2-c2f2-4710-a06f-45ad8c577441-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.232191] env[69994]: DEBUG oslo_concurrency.lockutils [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] Lock "489b68f2-c2f2-4710-a06f-45ad8c577441-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.232191] env[69994]: DEBUG oslo_concurrency.lockutils [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] Lock "489b68f2-c2f2-4710-a06f-45ad8c577441-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.232191] env[69994]: DEBUG nova.compute.manager [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] No waiting events found dispatching network-vif-plugged-a40388ce-c3c1-480f-8e1e-160c56294eab {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1088.232191] env[69994]: WARNING nova.compute.manager [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Received unexpected event network-vif-plugged-a40388ce-c3c1-480f-8e1e-160c56294eab for instance with vm_state building and task_state spawning. [ 1088.232191] env[69994]: DEBUG nova.compute.manager [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Received event network-changed-a40388ce-c3c1-480f-8e1e-160c56294eab {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1088.232191] env[69994]: DEBUG nova.compute.manager [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Refreshing instance network info cache due to event network-changed-a40388ce-c3c1-480f-8e1e-160c56294eab. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1088.233134] env[69994]: DEBUG oslo_concurrency.lockutils [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] Acquiring lock "refresh_cache-489b68f2-c2f2-4710-a06f-45ad8c577441" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.320514] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41fbec04-d5a6-4941-bbbf-68e0971732f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "63e1c67b-6a79-4c09-a835-4ff11e15e981" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 28.869s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.322131] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0dc5e367-bb36-447c-a421-eba3a493d121 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "63e1c67b-6a79-4c09-a835-4ff11e15e981" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 8.516s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.322468] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0dc5e367-bb36-447c-a421-eba3a493d121 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "63e1c67b-6a79-4c09-a835-4ff11e15e981-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.322705] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0dc5e367-bb36-447c-a421-eba3a493d121 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "63e1c67b-6a79-4c09-a835-4ff11e15e981-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.322880] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0dc5e367-bb36-447c-a421-eba3a493d121 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "63e1c67b-6a79-4c09-a835-4ff11e15e981-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.333045] env[69994]: INFO nova.compute.manager [None req-0dc5e367-bb36-447c-a421-eba3a493d121 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Terminating instance [ 1088.419358] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926150, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.523150] env[69994]: DEBUG oslo_concurrency.lockutils [None req-16b5b9a1-d647-4284-8eeb-7193ba556cad tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "c7c17fab-71a4-44df-907e-f7b408f80236" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.223s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.534446] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.577852] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926136, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.585674] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Releasing lock "refresh_cache-489b68f2-c2f2-4710-a06f-45ad8c577441" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.586062] env[69994]: DEBUG nova.compute.manager [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Instance network_info: |[{"id": "a40388ce-c3c1-480f-8e1e-160c56294eab", "address": "fa:16:3e:d4:25:ac", "network": {"id": "8bc3ada1-0296-4b96-9057-67d0d9a1fb35", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-529481842-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6df35f7e7a843648cacb18fc8dde527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa40388ce-c3", "ovs_interfaceid": "a40388ce-c3c1-480f-8e1e-160c56294eab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1088.586390] env[69994]: DEBUG oslo_concurrency.lockutils [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] Acquired lock "refresh_cache-489b68f2-c2f2-4710-a06f-45ad8c577441" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.586569] env[69994]: DEBUG nova.network.neutron [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Refreshing network info cache for port a40388ce-c3c1-480f-8e1e-160c56294eab {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1088.588019] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:25:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '572b7281-aad3-45fa-9cb2-fc1c70569948', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a40388ce-c3c1-480f-8e1e-160c56294eab', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1088.596743] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Creating folder: Project (b6df35f7e7a843648cacb18fc8dde527). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1088.598299] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2bd9152d-ba8a-4deb-aec0-29af80fe74a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.615128] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Created folder: Project (b6df35f7e7a843648cacb18fc8dde527) in parent group-v587342. [ 1088.615421] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Creating folder: Instances. Parent ref: group-v587612. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1088.615709] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4285cdf2-dd5f-460f-8abe-9a75dd436162 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.651686] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Created folder: Instances in parent group-v587612. [ 1088.652140] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1088.652506] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1088.652799] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fdc1160-ff3b-43f9-8ef0-68165dbcb717 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.682764] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1088.682764] env[69994]: value = "task-2926154" [ 1088.682764] env[69994]: _type = "Task" [ 1088.682764] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.696026] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926154, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.727080] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926151, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.837856] env[69994]: DEBUG nova.compute.manager [None req-0dc5e367-bb36-447c-a421-eba3a493d121 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1088.838195] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc5e367-bb36-447c-a421-eba3a493d121 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1088.838685] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-54a0fc51-d0a6-4b7e-944f-5107c4cd95b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.857049] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb0d072-43f4-4917-ad48-f732e9d34df8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.876944] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.877292] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.905956] env[69994]: WARNING nova.virt.vmwareapi.vmops [None req-0dc5e367-bb36-447c-a421-eba3a493d121 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 63e1c67b-6a79-4c09-a835-4ff11e15e981 could not be found. [ 1088.906235] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc5e367-bb36-447c-a421-eba3a493d121 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1088.906400] env[69994]: INFO nova.compute.manager [None req-0dc5e367-bb36-447c-a421-eba3a493d121 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Took 0.07 seconds to destroy the instance on the hypervisor. [ 1088.906659] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0dc5e367-bb36-447c-a421-eba3a493d121 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1088.909836] env[69994]: DEBUG nova.compute.manager [-] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1088.909890] env[69994]: DEBUG nova.network.neutron [-] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1088.922466] env[69994]: DEBUG oslo_vmware.api [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926150, 'name': PowerOnVM_Task, 'duration_secs': 0.713362} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.923432] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1088.923432] env[69994]: INFO nova.compute.manager [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Took 11.51 seconds to spawn the instance on the hypervisor. [ 1088.923432] env[69994]: DEBUG nova.compute.manager [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1088.924099] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5fa4544-3b31-4b73-b6e4-ba35943a9297 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.080997] env[69994]: DEBUG oslo_vmware.api [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926136, 'name': ReconfigVM_Task, 'duration_secs': 5.867422} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.081360] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.081631] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Reconfigured VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1089.137285] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c304bd05-06e8-4bd3-ab51-f99656a31646 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.148462] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e235388-89fd-45de-9eac-633b61691707 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.183936] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc90fb6-13c7-486b-9e75-ca1f87ed83bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.198131] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605bc4df-fb26-4666-a647-b52410b91fbf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.202444] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926154, 'name': CreateVM_Task, 'duration_secs': 0.407582} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.202607] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1089.203792] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.203958] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.204288] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1089.204569] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8480a330-896f-4ecc-b2c3-7f4f958cb5b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.214404] env[69994]: DEBUG nova.compute.provider_tree [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.221019] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Waiting for the task: (returnval){ [ 1089.221019] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f1f287-c419-ffab-6e13-e284a8accf38" [ 1089.221019] env[69994]: _type = "Task" [ 1089.221019] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.229487] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926151, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582167} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.231253] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] a7dd4e89-a953-49b4-b56f-fdacef3a621b/a7dd4e89-a953-49b4-b56f-fdacef3a621b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1089.231253] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1089.231738] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ede11981-3bb5-4423-89fa-15983f8d54d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.236701] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f1f287-c419-ffab-6e13-e284a8accf38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.242156] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1089.242156] env[69994]: value = "task-2926155" [ 1089.242156] env[69994]: _type = "Task" [ 1089.242156] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.252081] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926155, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.381246] env[69994]: DEBUG nova.compute.manager [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1089.448793] env[69994]: INFO nova.compute.manager [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Took 20.94 seconds to build instance. [ 1089.477047] env[69994]: DEBUG nova.network.neutron [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Updated VIF entry in instance network info cache for port a40388ce-c3c1-480f-8e1e-160c56294eab. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1089.477462] env[69994]: DEBUG nova.network.neutron [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Updating instance_info_cache with network_info: [{"id": "a40388ce-c3c1-480f-8e1e-160c56294eab", "address": "fa:16:3e:d4:25:ac", "network": {"id": "8bc3ada1-0296-4b96-9057-67d0d9a1fb35", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-529481842-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6df35f7e7a843648cacb18fc8dde527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa40388ce-c3", "ovs_interfaceid": "a40388ce-c3c1-480f-8e1e-160c56294eab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.718130] env[69994]: DEBUG nova.scheduler.client.report [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.732079] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f1f287-c419-ffab-6e13-e284a8accf38, 'name': SearchDatastore_Task, 'duration_secs': 0.041688} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.732380] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.732656] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1089.732900] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.733059] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.733244] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.734095] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53f11d8e-74c5-47e0-9f72-23c49abf8cbe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.750078] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.750365] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1089.753656] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a036bb12-9982-46fc-83d9-66708c0b190e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.757707] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926155, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071211} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.757951] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1089.759020] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db41eb2-df99-49f0-8fed-f33f8d85dd03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.762285] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Waiting for the task: (returnval){ [ 1089.762285] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bd0584-c527-6840-bb4f-5854289491a2" [ 1089.762285] env[69994]: _type = "Task" [ 1089.762285] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.783290] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] a7dd4e89-a953-49b4-b56f-fdacef3a621b/a7dd4e89-a953-49b4-b56f-fdacef3a621b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1089.783922] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73556ab1-ad3f-4702-a794-d9b6d82cea8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.800802] env[69994]: DEBUG nova.network.neutron [-] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.801966] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bd0584-c527-6840-bb4f-5854289491a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.809541] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1089.809541] env[69994]: value = "task-2926156" [ 1089.809541] env[69994]: _type = "Task" [ 1089.809541] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.818155] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926156, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.904435] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.950041] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b1309ee1-3d69-4f7f-8411-57816cb6efe8 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "3c374550-d65b-494a-89d7-60720f6b44dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.455s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.979976] env[69994]: DEBUG oslo_concurrency.lockutils [req-65576966-0747-4bcd-aaf3-aab7900bc647 req-ab116396-72b1-4cad-a35b-6b529c1775c3 service nova] Releasing lock "refresh_cache-489b68f2-c2f2-4710-a06f-45ad8c577441" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.223246] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.227021] env[69994]: DEBUG nova.compute.manager [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1090.227021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.873s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.227092] env[69994]: DEBUG nova.objects.instance [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lazy-loading 'resources' on Instance uuid 6c81eb8b-78d7-469d-8076-13d8a8f61fec {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1090.275199] env[69994]: DEBUG nova.compute.manager [req-411ef4f7-2a0c-446b-8d71-10bf2c7e50c2 req-b40b6ca1-a7d4-4e28-a770-b46dc9612010 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Received event network-vif-deleted-980fbc36-1a58-4992-a66c-ec31e2a90b67 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1090.275199] env[69994]: INFO nova.compute.manager [req-411ef4f7-2a0c-446b-8d71-10bf2c7e50c2 req-b40b6ca1-a7d4-4e28-a770-b46dc9612010 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Neutron deleted interface 980fbc36-1a58-4992-a66c-ec31e2a90b67; detaching it from the instance and deleting it from the info cache [ 1090.275199] env[69994]: DEBUG nova.network.neutron [req-411ef4f7-2a0c-446b-8d71-10bf2c7e50c2 req-b40b6ca1-a7d4-4e28-a770-b46dc9612010 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updating instance_info_cache with network_info: [{"id": "cf663439-5f58-4ebe-9323-5937dcc425a7", "address": "fa:16:3e:0e:91:27", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf663439-5f", "ovs_interfaceid": "cf663439-5f58-4ebe-9323-5937dcc425a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dda9491b-4aa9-4a86-9da0-a1f044e25c0e", "address": "fa:16:3e:b8:bf:02", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdda9491b-4a", "ovs_interfaceid": "dda9491b-4aa9-4a86-9da0-a1f044e25c0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.283198] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bd0584-c527-6840-bb4f-5854289491a2, 'name': SearchDatastore_Task, 'duration_secs': 0.047628} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.284048] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cd5b1c5-c1fe-4f09-9d71-03484b7de059 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.292108] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Waiting for the task: (returnval){ [ 1090.292108] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ef0318-9e68-b718-6102-38045e5b0028" [ 1090.292108] env[69994]: _type = "Task" [ 1090.292108] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.304940] env[69994]: INFO nova.compute.manager [-] [instance: 63e1c67b-6a79-4c09-a835-4ff11e15e981] Took 1.39 seconds to deallocate network for instance. [ 1090.306226] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ef0318-9e68-b718-6102-38045e5b0028, 'name': SearchDatastore_Task, 'duration_secs': 0.010621} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.308339] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.308631] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 489b68f2-c2f2-4710-a06f-45ad8c577441/489b68f2-c2f2-4710-a06f-45ad8c577441.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1090.316019] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1cb53a27-2515-4316-aa38-b3656f29074d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.320536] env[69994]: DEBUG nova.compute.manager [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1090.331278] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926156, 'name': ReconfigVM_Task, 'duration_secs': 0.280133} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.332774] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Reconfigured VM instance instance-00000060 to attach disk [datastore2] a7dd4e89-a953-49b4-b56f-fdacef3a621b/a7dd4e89-a953-49b4-b56f-fdacef3a621b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1090.333522] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Waiting for the task: (returnval){ [ 1090.333522] env[69994]: value = "task-2926157" [ 1090.333522] env[69994]: _type = "Task" [ 1090.333522] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.333709] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2284a126-37bd-4ccb-b9de-3a22159d7c2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.348966] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926157, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.352769] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1090.352769] env[69994]: value = "task-2926158" [ 1090.352769] env[69994]: _type = "Task" [ 1090.352769] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.362722] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926158, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.432134] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.432351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.432554] env[69994]: DEBUG nova.network.neutron [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1090.723510] env[69994]: DEBUG nova.compute.manager [req-65f6191e-2de5-4412-a05f-71b250824ff9 req-0d551946-454f-4f33-abf9-ef1487c94564 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Received event network-changed-da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1090.723634] env[69994]: DEBUG nova.compute.manager [req-65f6191e-2de5-4412-a05f-71b250824ff9 req-0d551946-454f-4f33-abf9-ef1487c94564 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Refreshing instance network info cache due to event network-changed-da352ba6-e52b-4b13-8514-5db1e4d826ee. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1090.723814] env[69994]: DEBUG oslo_concurrency.lockutils [req-65f6191e-2de5-4412-a05f-71b250824ff9 req-0d551946-454f-4f33-abf9-ef1487c94564 service nova] Acquiring lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.723962] env[69994]: DEBUG oslo_concurrency.lockutils [req-65f6191e-2de5-4412-a05f-71b250824ff9 req-0d551946-454f-4f33-abf9-ef1487c94564 service nova] Acquired lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.724177] env[69994]: DEBUG nova.network.neutron [req-65f6191e-2de5-4412-a05f-71b250824ff9 req-0d551946-454f-4f33-abf9-ef1487c94564 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Refreshing network info cache for port da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1090.730698] env[69994]: DEBUG nova.compute.utils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1090.738429] env[69994]: DEBUG nova.compute.manager [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1090.739067] env[69994]: DEBUG nova.network.neutron [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1090.777721] env[69994]: DEBUG oslo_concurrency.lockutils [req-411ef4f7-2a0c-446b-8d71-10bf2c7e50c2 req-b40b6ca1-a7d4-4e28-a770-b46dc9612010 service nova] Acquiring lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.777917] env[69994]: DEBUG oslo_concurrency.lockutils [req-411ef4f7-2a0c-446b-8d71-10bf2c7e50c2 req-b40b6ca1-a7d4-4e28-a770-b46dc9612010 service nova] Acquired lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.778858] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4e5730-31e9-4cc5-8265-430d3141c697 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.798840] env[69994]: DEBUG oslo_concurrency.lockutils [req-411ef4f7-2a0c-446b-8d71-10bf2c7e50c2 req-b40b6ca1-a7d4-4e28-a770-b46dc9612010 service nova] Releasing lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.799167] env[69994]: WARNING nova.compute.manager [req-411ef4f7-2a0c-446b-8d71-10bf2c7e50c2 req-b40b6ca1-a7d4-4e28-a770-b46dc9612010 service nova] [instance: 
d5af7ae1-d68e-4170-b762-e56d7f2551d7] Detach interface failed, port_id=980fbc36-1a58-4992-a66c-ec31e2a90b67, reason: No device with interface-id 980fbc36-1a58-4992-a66c-ec31e2a90b67 exists on VM: nova.exception.NotFound: No device with interface-id 980fbc36-1a58-4992-a66c-ec31e2a90b67 exists on VM [ 1090.804071] env[69994]: DEBUG nova.policy [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7be902c21aba40e1ac159ffa787eea04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d72179a46b64984b9ef219161bfcd76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1090.842926] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.849786] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926157, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.863361] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926158, 'name': Rename_Task, 'duration_secs': 0.153547} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.863695] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1090.864067] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5647372b-7939-4d32-914b-27643a6a9943 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.877380] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1090.877380] env[69994]: value = "task-2926159" [ 1090.877380] env[69994]: _type = "Task" [ 1090.877380] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.893077] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926159, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.015521] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.015807] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.015989] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.016215] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.016406] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.018914] env[69994]: INFO nova.compute.manager [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Terminating instance [ 1091.041753] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375574cf-1a03-4d1c-a6c4-1fe3bf67bd67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.050243] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f08e18-f72e-4d26-b2fe-27c593213965 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.084190] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-accd1ece-274e-40cc-925d-8d94121b6cf8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.092797] 
env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b82750-3862-4b34-ba00-91d8438c8681 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.107467] env[69994]: DEBUG nova.compute.provider_tree [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.201125] env[69994]: DEBUG nova.network.neutron [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Successfully created port: 54d38568-e06b-4b75-a558-72b7cd089413 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1091.225697] env[69994]: INFO nova.network.neutron [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Port dda9491b-4aa9-4a86-9da0-a1f044e25c0e from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1091.225697] env[69994]: DEBUG nova.network.neutron [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updating instance_info_cache with network_info: [{"id": "cf663439-5f58-4ebe-9323-5937dcc425a7", "address": "fa:16:3e:0e:91:27", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf663439-5f", "ovs_interfaceid": "cf663439-5f58-4ebe-9323-5937dcc425a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.237713] env[69994]: DEBUG nova.compute.manager [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1091.346837] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0dc5e367-bb36-447c-a421-eba3a493d121 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "63e1c67b-6a79-4c09-a835-4ff11e15e981" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.025s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.354722] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926157, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.681302} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.356027] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 489b68f2-c2f2-4710-a06f-45ad8c577441/489b68f2-c2f2-4710-a06f-45ad8c577441.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1091.356027] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1091.356027] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc2d8c30-525e-433e-8e5a-640249004b39 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.365526] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Waiting for the task: (returnval){ [ 1091.365526] env[69994]: value = "task-2926160" [ 1091.365526] env[69994]: _type = "Task" [ 1091.365526] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.382464] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926160, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.394475] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926159, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.515519] env[69994]: DEBUG nova.network.neutron [req-65f6191e-2de5-4412-a05f-71b250824ff9 req-0d551946-454f-4f33-abf9-ef1487c94564 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updated VIF entry in instance network info cache for port da352ba6-e52b-4b13-8514-5db1e4d826ee. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1091.515924] env[69994]: DEBUG nova.network.neutron [req-65f6191e-2de5-4412-a05f-71b250824ff9 req-0d551946-454f-4f33-abf9-ef1487c94564 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updating instance_info_cache with network_info: [{"id": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "address": "fa:16:3e:dd:e8:98", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda352ba6-e5", "ovs_interfaceid": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.522292] env[69994]: DEBUG nova.compute.manager [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1091.522491] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1091.523394] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3eeba0c-b0f9-4a5e-823d-b4ec6f1042ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.533935] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1091.534358] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6957ca2-6842-4e8e-b490-3b571bd3a767 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.541823] env[69994]: DEBUG oslo_vmware.api [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1091.541823] env[69994]: value = "task-2926161" [ 1091.541823] env[69994]: _type = "Task" [ 1091.541823] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.553060] env[69994]: DEBUG oslo_vmware.api [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926161, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.613453] env[69994]: DEBUG nova.scheduler.client.report [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1091.730597] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "refresh_cache-d5af7ae1-d68e-4170-b762-e56d7f2551d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.876317] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926160, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080187} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.876614] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1091.877464] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be67ae4-596d-4e84-963b-b7165d1dc338 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.903617] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 489b68f2-c2f2-4710-a06f-45ad8c577441/489b68f2-c2f2-4710-a06f-45ad8c577441.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.904466] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1636fb2e-30c5-4f75-bcaf-47e6466ab526 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.924100] env[69994]: DEBUG oslo_vmware.api [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926159, 'name': PowerOnVM_Task, 'duration_secs': 0.523092} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.924806] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1091.925017] env[69994]: INFO nova.compute.manager [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Took 8.76 seconds to spawn the instance on the hypervisor. [ 1091.925206] env[69994]: DEBUG nova.compute.manager [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1091.925981] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9487bc-26c0-4c24-a9ce-b0859e20ebf3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.931066] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Waiting for the task: (returnval){ [ 1091.931066] env[69994]: value = "task-2926162" [ 1091.931066] env[69994]: _type = "Task" [ 1091.931066] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.944527] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926162, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.019172] env[69994]: DEBUG oslo_concurrency.lockutils [req-65f6191e-2de5-4412-a05f-71b250824ff9 req-0d551946-454f-4f33-abf9-ef1487c94564 service nova] Releasing lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.053581] env[69994]: DEBUG oslo_vmware.api [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926161, 'name': PowerOffVM_Task, 'duration_secs': 0.383164} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.053581] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1092.053581] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1092.053950] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3070d2d-5c10-449e-bed4-883db570712f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.119143] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.892s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.121699] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.357s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.121939] env[69994]: DEBUG nova.objects.instance [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lazy-loading 'resources' on Instance uuid 922799c0-707c-4f4e-a54c-f015eab0a8d7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1092.149033] env[69994]: INFO nova.scheduler.client.report [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Deleted allocations for instance 6c81eb8b-78d7-469d-8076-13d8a8f61fec [ 1092.193032] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1092.193032] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1092.193032] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 
tempest-AttachInterfacesTestJSON-1252867505-project-member] Deleting the datastore file [datastore1] d5af7ae1-d68e-4170-b762-e56d7f2551d7 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1092.193032] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db187466-464d-40c8-9c8f-506a6ba0b27f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.203712] env[69994]: DEBUG oslo_vmware.api [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1092.203712] env[69994]: value = "task-2926164" [ 1092.203712] env[69994]: _type = "Task" [ 1092.203712] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.214082] env[69994]: DEBUG oslo_vmware.api [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926164, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.235482] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0930fbae-1a9d-4466-a03c-64bdd43b1f97 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-d5af7ae1-d68e-4170-b762-e56d7f2551d7-980fbc36-1a58-4992-a66c-ec31e2a90b67" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.793s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.252249] env[69994]: DEBUG nova.compute.manager [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1092.275579] env[69994]: DEBUG nova.virt.hardware [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1092.275721] env[69994]: DEBUG nova.virt.hardware [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1092.275822] env[69994]: DEBUG nova.virt.hardware [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1092.276011] env[69994]: DEBUG nova.virt.hardware [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1092.276164] env[69994]: DEBUG nova.virt.hardware [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1092.276309] env[69994]: DEBUG nova.virt.hardware [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1092.276514] env[69994]: DEBUG nova.virt.hardware [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1092.276715] env[69994]: DEBUG nova.virt.hardware [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1092.276824] 
env[69994]: DEBUG nova.virt.hardware [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1092.276983] env[69994]: DEBUG nova.virt.hardware [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1092.277179] env[69994]: DEBUG nova.virt.hardware [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1092.278064] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6f41f5-710f-4afc-a68a-4dc62bb2569a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.286920] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fc3057-e487-4939-9e01-5fd6240251ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.297297] env[69994]: DEBUG nova.compute.manager [req-784faf30-41bb-449e-a93d-dbdb3fd3d6f4 req-44cf524b-1219-4db5-9952-5fe8fe3ae422 service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Received event network-vif-deleted-dda9491b-4aa9-4a86-9da0-a1f044e25c0e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1092.447085] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.450395] env[69994]: INFO nova.compute.manager [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Took 17.64 seconds to build instance. [ 1092.658869] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a357c2e0-95c2-45c0-9af3-c91528c1f318 tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "6c81eb8b-78d7-469d-8076-13d8a8f61fec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.030s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.715080] env[69994]: DEBUG oslo_vmware.api [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926164, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.492717} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.718273] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.719017] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1092.719320] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1092.719561] env[69994]: INFO nova.compute.manager [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1092.719846] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.720684] env[69994]: DEBUG nova.compute.manager [-] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1092.720886] env[69994]: DEBUG nova.network.neutron [-] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1092.788744] env[69994]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port dda9491b-4aa9-4a86-9da0-a1f044e25c0e could not be found.", "detail": ""}} {{(pid=69994) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1092.789035] env[69994]: DEBUG nova.network.neutron [-] Unable to show port dda9491b-4aa9-4a86-9da0-a1f044e25c0e as it no longer exists. 
{{(pid=69994) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1092.827128] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "85b58e95-04fd-45ff-ac60-d0167031e148" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.827128] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "85b58e95-04fd-45ff-ac60-d0167031e148" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.946823] env[69994]: DEBUG nova.network.neutron [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Successfully updated port: 54d38568-e06b-4b75-a558-72b7cd089413 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1092.947208] env[69994]: DEBUG nova.compute.manager [req-84e9949c-5adc-4938-9dff-6e4bfca5df72 req-6d0d877d-61f5-49be-a7cb-ccc8d7e4bb5e service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Received event network-vif-plugged-54d38568-e06b-4b75-a558-72b7cd089413 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1092.947416] env[69994]: DEBUG oslo_concurrency.lockutils [req-84e9949c-5adc-4938-9dff-6e4bfca5df72 req-6d0d877d-61f5-49be-a7cb-ccc8d7e4bb5e service nova] Acquiring lock "dd196e59-868b-409f-bddb-bb99b0c1092f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.947634] env[69994]: DEBUG oslo_concurrency.lockutils [req-84e9949c-5adc-4938-9dff-6e4bfca5df72 req-6d0d877d-61f5-49be-a7cb-ccc8d7e4bb5e service nova] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.947770] env[69994]: DEBUG oslo_concurrency.lockutils [req-84e9949c-5adc-4938-9dff-6e4bfca5df72 req-6d0d877d-61f5-49be-a7cb-ccc8d7e4bb5e service nova] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.947957] env[69994]: DEBUG nova.compute.manager [req-84e9949c-5adc-4938-9dff-6e4bfca5df72 req-6d0d877d-61f5-49be-a7cb-ccc8d7e4bb5e service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] No waiting events found dispatching network-vif-plugged-54d38568-e06b-4b75-a558-72b7cd089413 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1092.950289] env[69994]: WARNING nova.compute.manager [req-84e9949c-5adc-4938-9dff-6e4bfca5df72 req-6d0d877d-61f5-49be-a7cb-ccc8d7e4bb5e service nova] [instance: 
dd196e59-868b-409f-bddb-bb99b0c1092f] Received unexpected event network-vif-plugged-54d38568-e06b-4b75-a558-72b7cd089413 for instance with vm_state building and task_state spawning. [ 1092.956947] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c233baf8-5a95-4963-a31c-b3a63130cc98 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "a7dd4e89-a953-49b4-b56f-fdacef3a621b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.192s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.958268] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926162, 'name': ReconfigVM_Task, 'duration_secs': 0.778} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.959985] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 489b68f2-c2f2-4710-a06f-45ad8c577441/489b68f2-c2f2-4710-a06f-45ad8c577441.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1092.961981] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf736f84-003a-47e0-9ad9-cc0624e67188 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.965950] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64b2ac45-508b-4113-81da-b93477793304 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.975739] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a75679a-dcba-40ee-a9ce-98f168b0fd9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.979333] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Waiting for the task: (returnval){ [ 1092.979333] env[69994]: value = "task-2926165" [ 1092.979333] env[69994]: _type = "Task" [ 1092.979333] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.012841] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70487a8f-d990-4fbf-9a57-06c32fabd86e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.024719] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa13278-d997-4a65-beb7-b7e851917930 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.029303] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926165, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.040754] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "a7dd4e89-a953-49b4-b56f-fdacef3a621b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.041105] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "a7dd4e89-a953-49b4-b56f-fdacef3a621b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.041421] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "a7dd4e89-a953-49b4-b56f-fdacef3a621b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.041675] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "a7dd4e89-a953-49b4-b56f-fdacef3a621b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.041924] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "a7dd4e89-a953-49b4-b56f-fdacef3a621b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.044045] env[69994]: DEBUG nova.compute.provider_tree [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Inventory has not changed in ProviderTree for provider: 
2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.045623] env[69994]: INFO nova.compute.manager [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Terminating instance [ 1093.328393] env[69994]: DEBUG nova.compute.manager [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1093.458752] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "refresh_cache-dd196e59-868b-409f-bddb-bb99b0c1092f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.459025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired lock "refresh_cache-dd196e59-868b-409f-bddb-bb99b0c1092f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.459293] env[69994]: DEBUG nova.network.neutron [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1093.490356] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926165, 'name': Rename_Task, 'duration_secs': 0.490433} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.490646] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1093.490889] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-823ee199-decf-46fc-8fdf-883b344101ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.497636] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Waiting for the task: (returnval){ [ 1093.497636] env[69994]: value = "task-2926166" [ 1093.497636] env[69994]: _type = "Task" [ 1093.497636] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.507706] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926166, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.549056] env[69994]: DEBUG nova.scheduler.client.report [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1093.553320] env[69994]: DEBUG nova.compute.manager [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1093.553320] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1093.554233] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95e51a6-bd80-4908-a9dd-1250519fdce6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.562887] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1093.563162] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f491fa6e-cbe6-476a-aa87-69760cdf38dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.571511] env[69994]: DEBUG oslo_vmware.api [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1093.571511] env[69994]: value = "task-2926167" [ 1093.571511] env[69994]: _type = "Task" [ 1093.571511] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.581611] env[69994]: DEBUG oslo_vmware.api [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926167, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.750841] env[69994]: DEBUG nova.network.neutron [-] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.852198] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.996389] env[69994]: DEBUG nova.network.neutron [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1094.008536] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926166, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.054849] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.933s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.056982] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 10.465s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.073685] env[69994]: INFO nova.scheduler.client.report [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Deleted allocations for instance 922799c0-707c-4f4e-a54c-f015eab0a8d7 [ 1094.089711] env[69994]: DEBUG oslo_vmware.api [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926167, 'name': PowerOffVM_Task, 'duration_secs': 0.198617} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.090110] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1094.090349] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1094.090544] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d1cc9a9d-0541-45f7-bcae-60e76a49c3e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.164116] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1094.164368] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1094.168029] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleting the datastore file [datastore2] a7dd4e89-a953-49b4-b56f-fdacef3a621b {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1094.168029] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a7dd44f-2b10-4a1f-aae4-0d6098235acc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.175848] env[69994]: DEBUG oslo_vmware.api [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1094.175848] env[69994]: value = "task-2926169" [ 1094.175848] env[69994]: _type = "Task" [ 1094.175848] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.187732] env[69994]: DEBUG oslo_vmware.api [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926169, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.193120] env[69994]: DEBUG nova.network.neutron [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Updating instance_info_cache with network_info: [{"id": "54d38568-e06b-4b75-a558-72b7cd089413", "address": "fa:16:3e:86:e4:e5", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54d38568-e0", "ovs_interfaceid": "54d38568-e06b-4b75-a558-72b7cd089413", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.253505] env[69994]: INFO nova.compute.manager [-] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Took 1.53 seconds to deallocate network for instance. [ 1094.323117] env[69994]: DEBUG nova.compute.manager [req-92de7c86-8713-4822-a610-f06825f240be req-97aebf86-3fa6-4f02-9d6d-46500e8cd79b service nova] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Received event network-vif-deleted-cf663439-5f58-4ebe-9323-5937dcc425a7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1094.508479] env[69994]: DEBUG oslo_vmware.api [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926166, 'name': PowerOnVM_Task, 'duration_secs': 0.535725} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.508762] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1094.509060] env[69994]: INFO nova.compute.manager [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Took 8.97 seconds to spawn the instance on the hypervisor. 
[ 1094.509267] env[69994]: DEBUG nova.compute.manager [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1094.510141] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785f09af-2efc-4b9c-871d-578a79de5186 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.587137] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d4a31e1-5048-4906-952d-54fff03d32ff tempest-ListServersNegativeTestJSON-1374068228 tempest-ListServersNegativeTestJSON-1374068228-project-member] Lock "922799c0-707c-4f4e-a54c-f015eab0a8d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.173s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.686868] env[69994]: DEBUG oslo_vmware.api [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926169, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224851} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.687128] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1094.687322] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1094.687508] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1094.687689] env[69994]: INFO nova.compute.manager [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1094.687935] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1094.688153] env[69994]: DEBUG nova.compute.manager [-] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1094.688248] env[69994]: DEBUG nova.network.neutron [-] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1094.697534] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Releasing lock "refresh_cache-dd196e59-868b-409f-bddb-bb99b0c1092f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.697842] env[69994]: DEBUG nova.compute.manager [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Instance network_info: |[{"id": "54d38568-e06b-4b75-a558-72b7cd089413", "address": "fa:16:3e:86:e4:e5", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54d38568-e0", "ovs_interfaceid": "54d38568-e06b-4b75-a558-72b7cd089413", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1094.698250] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:e4:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53915f38-d7a0-42ec-8b30-1eacfb2cc379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54d38568-e06b-4b75-a558-72b7cd089413', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1094.706317] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1094.706536] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1094.706820] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-996fb83b-0d3d-4b33-9666-08cd18ffeaa2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.728044] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1094.728044] env[69994]: value = "task-2926170" [ 1094.728044] env[69994]: _type = "Task" [ 1094.728044] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.736447] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926170, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.760857] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.994348] env[69994]: DEBUG nova.compute.manager [req-c8a80c69-dba9-4a6b-9bb8-f690c0cca133 req-21277a7a-4421-49a9-b8a3-b1d226b7e2d7 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Received event network-changed-54d38568-e06b-4b75-a558-72b7cd089413 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1094.994735] env[69994]: DEBUG nova.compute.manager [req-c8a80c69-dba9-4a6b-9bb8-f690c0cca133 req-21277a7a-4421-49a9-b8a3-b1d226b7e2d7 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Refreshing instance network info cache due to event network-changed-54d38568-e06b-4b75-a558-72b7cd089413. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1094.994793] env[69994]: DEBUG oslo_concurrency.lockutils [req-c8a80c69-dba9-4a6b-9bb8-f690c0cca133 req-21277a7a-4421-49a9-b8a3-b1d226b7e2d7 service nova] Acquiring lock "refresh_cache-dd196e59-868b-409f-bddb-bb99b0c1092f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.994936] env[69994]: DEBUG oslo_concurrency.lockutils [req-c8a80c69-dba9-4a6b-9bb8-f690c0cca133 req-21277a7a-4421-49a9-b8a3-b1d226b7e2d7 service nova] Acquired lock "refresh_cache-dd196e59-868b-409f-bddb-bb99b0c1092f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.995106] env[69994]: DEBUG nova.network.neutron [req-c8a80c69-dba9-4a6b-9bb8-f690c0cca133 req-21277a7a-4421-49a9-b8a3-b1d226b7e2d7 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Refreshing network info cache for port 54d38568-e06b-4b75-a558-72b7cd089413 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1095.030784] env[69994]: INFO nova.compute.manager [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Took 19.10 seconds to build instance. [ 1095.069018] env[69994]: INFO nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating resource usage from migration 1f04407a-5664-4068-88a4-f8523ad869fc [ 1095.069559] env[69994]: INFO nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating resource usage from migration 740fc0de-ab50-4550-98f1-7276a9ece169 [ 1095.096490] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 06fa5ab5-baab-466e-8574-5391247c13a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1095.096672] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance d5af7ae1-d68e-4170-b762-e56d7f2551d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1095.096885] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 8001cb13-6a52-451b-b4b6-57b893975079 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1095.097060] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 68eba44a-0989-47dc-a88b-102d9aa34c5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1095.097199] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 85293c91-f363-4085-9eb8-2bf6514fa2f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1095.097862] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 3c374550-d65b-494a-89d7-60720f6b44dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1095.098036] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance a7dd4e89-a953-49b4-b56f-fdacef3a621b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1095.098265] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 489b68f2-c2f2-4710-a06f-45ad8c577441 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1095.098384] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance dd196e59-868b-409f-bddb-bb99b0c1092f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1095.098451] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Migration 740fc0de-ab50-4550-98f1-7276a9ece169 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1095.098613] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance f0b77732-aae1-4790-a2c7-75586e78eda6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1095.240109] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926170, 'name': CreateVM_Task, 'duration_secs': 0.377784} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.240369] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1095.241087] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.241255] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.241592] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1095.241886] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99555bc4-8b1c-4538-bf05-58a79fc9f95a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.248362] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1095.248362] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5246b209-7c7f-3b5b-ca93-efc1d77a5da1" [ 1095.248362] env[69994]: _type = "Task" [ 1095.248362] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.258409] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5246b209-7c7f-3b5b-ca93-efc1d77a5da1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.478207] env[69994]: DEBUG nova.network.neutron [-] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.533015] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c9acbcc-2d81-430b-9107-6c41edd8276d tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Lock "489b68f2-c2f2-4710-a06f-45ad8c577441" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.608s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.604365] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 5b73cd44-6c89-4e12-9195-16b5172cbf2c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1095.759900] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5246b209-7c7f-3b5b-ca93-efc1d77a5da1, 'name': SearchDatastore_Task, 'duration_secs': 0.033426} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.760408] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.760706] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1095.761030] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.761292] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.761560] env[69994]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1095.761893] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1905ab2-e907-4dc1-9673-0c643213d2ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.764629] env[69994]: DEBUG nova.network.neutron [req-c8a80c69-dba9-4a6b-9bb8-f690c0cca133 req-21277a7a-4421-49a9-b8a3-b1d226b7e2d7 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Updated VIF entry in instance network info cache for port 54d38568-e06b-4b75-a558-72b7cd089413. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1095.765039] env[69994]: DEBUG nova.network.neutron [req-c8a80c69-dba9-4a6b-9bb8-f690c0cca133 req-21277a7a-4421-49a9-b8a3-b1d226b7e2d7 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Updating instance_info_cache with network_info: [{"id": "54d38568-e06b-4b75-a558-72b7cd089413", "address": "fa:16:3e:86:e4:e5", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54d38568-e0", "ovs_interfaceid": "54d38568-e06b-4b75-a558-72b7cd089413", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.774255] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1095.774438] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1095.776541] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c482d8b-bf16-4bae-95e2-66e6700d921f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.782827] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1095.782827] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528e1435-5c45-ad9c-42dc-1a6caa859e96" [ 1095.782827] env[69994]: _type = "Task" [ 1095.782827] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.792469] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528e1435-5c45-ad9c-42dc-1a6caa859e96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.980727] env[69994]: INFO nova.compute.manager [-] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Took 1.29 seconds to deallocate network for instance. [ 1096.107631] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1096.268342] env[69994]: DEBUG oslo_concurrency.lockutils [req-c8a80c69-dba9-4a6b-9bb8-f690c0cca133 req-21277a7a-4421-49a9-b8a3-b1d226b7e2d7 service nova] Releasing lock "refresh_cache-dd196e59-868b-409f-bddb-bb99b0c1092f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.295379] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528e1435-5c45-ad9c-42dc-1a6caa859e96, 'name': SearchDatastore_Task, 'duration_secs': 0.012038} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.296236] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c99fbba-3263-4eec-b2f9-eaade561b056 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.301827] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1096.301827] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526144cb-c38b-76db-999d-96b752b3453e" [ 1096.301827] env[69994]: _type = "Task" [ 1096.301827] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.309975] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526144cb-c38b-76db-999d-96b752b3453e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.355047] env[69994]: DEBUG nova.compute.manager [req-f8651736-51d9-4c33-9c0e-33c7a63b5ddc req-b6ce9870-ee5f-4777-ae83-3d099416cfcb service nova] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Received event network-vif-deleted-858e65bf-8ce8-45e6-878b-36f3c884077d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1096.355047] env[69994]: DEBUG nova.compute.manager [req-f8651736-51d9-4c33-9c0e-33c7a63b5ddc req-b6ce9870-ee5f-4777-ae83-3d099416cfcb service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Received event network-changed-a40388ce-c3c1-480f-8e1e-160c56294eab {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1096.355219] env[69994]: DEBUG nova.compute.manager [req-f8651736-51d9-4c33-9c0e-33c7a63b5ddc req-b6ce9870-ee5f-4777-ae83-3d099416cfcb service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Refreshing instance network info cache due to event network-changed-a40388ce-c3c1-480f-8e1e-160c56294eab. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1096.355521] env[69994]: DEBUG oslo_concurrency.lockutils [req-f8651736-51d9-4c33-9c0e-33c7a63b5ddc req-b6ce9870-ee5f-4777-ae83-3d099416cfcb service nova] Acquiring lock "refresh_cache-489b68f2-c2f2-4710-a06f-45ad8c577441" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.355669] env[69994]: DEBUG oslo_concurrency.lockutils [req-f8651736-51d9-4c33-9c0e-33c7a63b5ddc req-b6ce9870-ee5f-4777-ae83-3d099416cfcb service nova] Acquired lock "refresh_cache-489b68f2-c2f2-4710-a06f-45ad8c577441" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.355829] env[69994]: DEBUG nova.network.neutron [req-f8651736-51d9-4c33-9c0e-33c7a63b5ddc req-b6ce9870-ee5f-4777-ae83-3d099416cfcb service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Refreshing network info cache for port a40388ce-c3c1-480f-8e1e-160c56294eab {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1096.497941] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.612195] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 60f6d502-0fef-4764-8c1f-1b1d5ab3db41 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1096.612377] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Migration 1f04407a-5664-4068-88a4-f8523ad869fc is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1096.612468] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance c7c17fab-71a4-44df-907e-f7b408f80236 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1096.813170] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526144cb-c38b-76db-999d-96b752b3453e, 'name': SearchDatastore_Task, 'duration_secs': 0.011006} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.813468] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.813736] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] dd196e59-868b-409f-bddb-bb99b0c1092f/dd196e59-868b-409f-bddb-bb99b0c1092f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1096.814038] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76651a3f-d8c5-4f90-a3b3-91718933381b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.822378] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1096.822378] env[69994]: value = "task-2926171" [ 1096.822378] env[69994]: _type = "Task" [ 1096.822378] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.831262] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926171, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.119515] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 85b58e95-04fd-45ff-ac60-d0167031e148 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1097.119859] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1097.119859] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1097.166614] env[69994]: DEBUG nova.network.neutron [req-f8651736-51d9-4c33-9c0e-33c7a63b5ddc req-b6ce9870-ee5f-4777-ae83-3d099416cfcb service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Updated VIF entry in instance network info cache for port a40388ce-c3c1-480f-8e1e-160c56294eab. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1097.167584] env[69994]: DEBUG nova.network.neutron [req-f8651736-51d9-4c33-9c0e-33c7a63b5ddc req-b6ce9870-ee5f-4777-ae83-3d099416cfcb service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Updating instance_info_cache with network_info: [{"id": "a40388ce-c3c1-480f-8e1e-160c56294eab", "address": "fa:16:3e:d4:25:ac", "network": {"id": "8bc3ada1-0296-4b96-9057-67d0d9a1fb35", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-529481842-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6df35f7e7a843648cacb18fc8dde527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa40388ce-c3", "ovs_interfaceid": "a40388ce-c3c1-480f-8e1e-160c56294eab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.333683] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926171, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470995} completed 
successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.333978] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] dd196e59-868b-409f-bddb-bb99b0c1092f/dd196e59-868b-409f-bddb-bb99b0c1092f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1097.334230] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1097.334921] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c58f53d4-5677-4c17-a66e-c428f63bc99e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.343793] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1097.343793] env[69994]: value = "task-2926172" [ 1097.343793] env[69994]: _type = "Task" [ 1097.343793] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.354193] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926172, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.380417] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335a6d46-3d8c-48f6-b594-94cbacb72ba7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.388992] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b35757-720a-4c63-a9b8-fed7d86aa5e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.422524] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a15e12a-89eb-43e4-89aa-e341828851cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.430432] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08996381-b451-4210-8e3e-71a49a5bab89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.445154] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1097.670193] env[69994]: DEBUG oslo_concurrency.lockutils [req-f8651736-51d9-4c33-9c0e-33c7a63b5ddc req-b6ce9870-ee5f-4777-ae83-3d099416cfcb service nova] Releasing lock "refresh_cache-489b68f2-c2f2-4710-a06f-45ad8c577441" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.854991] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926172, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073879} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.856769] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1097.856769] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30519c41-0944-4b5a-a642-b06a86658ad2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.882324] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] dd196e59-868b-409f-bddb-bb99b0c1092f/dd196e59-868b-409f-bddb-bb99b0c1092f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1097.882728] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c94a840-4e9b-4d98-adb9-bfaa5348d82b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.904395] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1097.904395] env[69994]: value = "task-2926173" [ 1097.904395] env[69994]: _type = "Task" [ 1097.904395] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.915874] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926173, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.948308] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1098.415749] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926173, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.454064] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1098.454327] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.397s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.454607] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 13.577s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.916961] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926173, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.960515] env[69994]: INFO nova.compute.claims [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1099.419699] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926173, 'name': ReconfigVM_Task, 'duration_secs': 1.155763} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.420277] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Reconfigured VM instance instance-00000062 to attach disk [datastore1] dd196e59-868b-409f-bddb-bb99b0c1092f/dd196e59-868b-409f-bddb-bb99b0c1092f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1099.420642] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-717dff43-8e28-450e-b2d6-62e409b1e243 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.428106] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1099.428106] env[69994]: value = "task-2926174" [ 1099.428106] env[69994]: _type = "Task" [ 1099.428106] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.438300] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926174, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.467710] env[69994]: INFO nova.compute.resource_tracker [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating resource usage from migration 740fc0de-ab50-4550-98f1-7276a9ece169 [ 1099.726449] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4be1198-cdee-4415-8a91-942ba43a0158 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.734800] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259e137b-76d3-49fe-999e-f8d04b9bc494 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.765112] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88340987-3dc9-422a-b7c6-7724c7f331fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.773733] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d34744d-4449-4175-8b23-781ef596a738 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.788350] env[69994]: DEBUG nova.compute.provider_tree [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1099.939710] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926174, 'name': Rename_Task, 'duration_secs': 0.141235} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.939930] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1099.940208] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f434baf0-5d8d-4fcc-8daa-85e96d58e581 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.947436] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1099.947436] env[69994]: value = "task-2926175" [ 1099.947436] env[69994]: _type = "Task" [ 1099.947436] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.958118] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926175, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.291458] env[69994]: DEBUG nova.scheduler.client.report [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1100.459516] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926175, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.797960] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.343s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.798247] env[69994]: INFO nova.compute.manager [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Migrating [ 1100.806647] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.394s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.807580] env[69994]: INFO nova.compute.claims [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1100.961638] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926175, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.324016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.324379] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.325024] env[69994]: DEBUG nova.network.neutron [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1101.433588] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.433829] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.462183] env[69994]: DEBUG oslo_vmware.api [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926175, 'name': PowerOnVM_Task, 'duration_secs': 1.179605} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.462586] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1101.462882] env[69994]: INFO nova.compute.manager [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Took 9.21 seconds to spawn the instance on the hypervisor. 
[ 1101.463164] env[69994]: DEBUG nova.compute.manager [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1101.464534] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efbc67e-1e53-48a1-b6bd-569b7715f2b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.936600] env[69994]: DEBUG nova.compute.manager [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1101.989309] env[69994]: INFO nova.compute.manager [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Took 23.75 seconds to build instance. [ 1102.072097] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925c6997-f453-43b6-8ff9-dab24aacd943 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.083120] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a3caec-0d54-4689-90c3-62d46d67ae58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.120331] env[69994]: DEBUG nova.network.neutron [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance_info_cache with network_info: [{"id": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "address": "fa:16:3e:fd:73:3a", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f0159af-9c", "ovs_interfaceid": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.122181] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edf551d-f088-42bf-8bf6-21a534446bef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.131797] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e219ce-4a71-4c3b-8cea-9d176dd4c86f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.147985] env[69994]: DEBUG nova.compute.provider_tree [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.460988] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.490445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c25d86ee-bf4e-40f8-8559-eff0166be4e0 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.259s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.625814] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.651383] env[69994]: DEBUG nova.scheduler.client.report [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1102.900365] env[69994]: DEBUG nova.compute.manager [req-048e8569-55b3-4e61-b3ff-05961d016096 req-87fcd3fd-485a-4fc1-a289-1ec34290e788 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Received event network-changed-54d38568-e06b-4b75-a558-72b7cd089413 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1102.900365] env[69994]: DEBUG nova.compute.manager [req-048e8569-55b3-4e61-b3ff-05961d016096 req-87fcd3fd-485a-4fc1-a289-1ec34290e788 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Refreshing instance network info cache due to event 
network-changed-54d38568-e06b-4b75-a558-72b7cd089413. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1102.900365] env[69994]: DEBUG oslo_concurrency.lockutils [req-048e8569-55b3-4e61-b3ff-05961d016096 req-87fcd3fd-485a-4fc1-a289-1ec34290e788 service nova] Acquiring lock "refresh_cache-dd196e59-868b-409f-bddb-bb99b0c1092f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.900365] env[69994]: DEBUG oslo_concurrency.lockutils [req-048e8569-55b3-4e61-b3ff-05961d016096 req-87fcd3fd-485a-4fc1-a289-1ec34290e788 service nova] Acquired lock "refresh_cache-dd196e59-868b-409f-bddb-bb99b0c1092f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.900365] env[69994]: DEBUG nova.network.neutron [req-048e8569-55b3-4e61-b3ff-05961d016096 req-87fcd3fd-485a-4fc1-a289-1ec34290e788 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Refreshing network info cache for port 54d38568-e06b-4b75-a558-72b7cd089413 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1103.156651] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.351s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.157230] env[69994]: DEBUG nova.compute.manager [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1103.160964] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.627s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.165380] env[69994]: INFO nova.compute.claims [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1103.641680] env[69994]: DEBUG nova.network.neutron [req-048e8569-55b3-4e61-b3ff-05961d016096 req-87fcd3fd-485a-4fc1-a289-1ec34290e788 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Updated VIF entry in instance network info cache for port 54d38568-e06b-4b75-a558-72b7cd089413. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1103.642306] env[69994]: DEBUG nova.network.neutron [req-048e8569-55b3-4e61-b3ff-05961d016096 req-87fcd3fd-485a-4fc1-a289-1ec34290e788 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Updating instance_info_cache with network_info: [{"id": "54d38568-e06b-4b75-a558-72b7cd089413", "address": "fa:16:3e:86:e4:e5", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54d38568-e0", "ovs_interfaceid": "54d38568-e06b-4b75-a558-72b7cd089413", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.670631] env[69994]: DEBUG nova.compute.utils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1103.672798] env[69994]: DEBUG nova.compute.manager [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1103.673015] env[69994]: DEBUG nova.network.neutron [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1103.716389] env[69994]: DEBUG nova.policy [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '75c4f3c8013e4487aeb79068c4b7a3d7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0af2d3f09d264d4c9bba8747f74383bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1103.985122] env[69994]: DEBUG nova.network.neutron [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Successfully created port: 27c41d65-6213-4b24-b4b8-08a5175c26ef {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1104.144469] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ca9200-dc43-47e6-b3b4-88b77304a1a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.146641] env[69994]: DEBUG oslo_concurrency.lockutils [req-048e8569-55b3-4e61-b3ff-05961d016096 req-87fcd3fd-485a-4fc1-a289-1ec34290e788 service nova] Releasing lock "refresh_cache-dd196e59-868b-409f-bddb-bb99b0c1092f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.162421] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance 'f0b77732-aae1-4790-a2c7-75586e78eda6' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1104.173838] env[69994]: DEBUG nova.compute.manager [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1104.429006] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e34352-1c17-41f7-8261-32822ee0518e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.437692] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d44e29-cfe1-4d47-985f-47914b08e287 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.468996] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a493b424-e3e0-4e82-90a7-b5b838253f7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.477097] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b95e1c2-511f-4e19-a6df-d4cf6524d938 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.490664] env[69994]: DEBUG nova.compute.provider_tree [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.668084] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.668454] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bdc91d9a-2bf4-4389-b4be-431fae7953d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.676275] env[69994]: DEBUG oslo_vmware.api [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1104.676275] env[69994]: value = "task-2926176" [ 1104.676275] env[69994]: _type = "Task" [ 1104.676275] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.687561] env[69994]: DEBUG oslo_vmware.api [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926176, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.994280] env[69994]: DEBUG nova.scheduler.client.report [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1105.187030] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1105.187228] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance 'f0b77732-aae1-4790-a2c7-75586e78eda6' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1105.191228] env[69994]: DEBUG nova.compute.manager [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1105.216476] env[69994]: DEBUG nova.virt.hardware [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1105.216740] env[69994]: DEBUG nova.virt.hardware [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1105.216878] env[69994]: DEBUG nova.virt.hardware [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1105.217078] env[69994]: DEBUG nova.virt.hardware [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1105.217229] env[69994]: DEBUG nova.virt.hardware [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1105.217377] env[69994]: DEBUG nova.virt.hardware [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1105.217587] env[69994]: DEBUG nova.virt.hardware [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1105.217745] env[69994]: DEBUG nova.virt.hardware [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1105.217908] env[69994]: DEBUG 
nova.virt.hardware [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1105.218094] env[69994]: DEBUG nova.virt.hardware [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1105.218296] env[69994]: DEBUG nova.virt.hardware [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1105.219153] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c369ca19-9905-4b91-8e96-43fec3d828aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.227583] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11461c4-aa10-4d50-816d-aa6e2f6bded0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.472986] env[69994]: DEBUG nova.compute.manager [req-5c3e27fa-9dbf-42d8-b4cb-093f20c8002c req-86395b8d-30a6-412d-afe5-2b9c2d58dc21 service nova] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Received event network-vif-plugged-27c41d65-6213-4b24-b4b8-08a5175c26ef {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1105.472986] env[69994]: DEBUG oslo_concurrency.lockutils [req-5c3e27fa-9dbf-42d8-b4cb-093f20c8002c req-86395b8d-30a6-412d-afe5-2b9c2d58dc21 service nova] Acquiring lock "5b73cd44-6c89-4e12-9195-16b5172cbf2c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.472986] env[69994]: DEBUG oslo_concurrency.lockutils [req-5c3e27fa-9dbf-42d8-b4cb-093f20c8002c req-86395b8d-30a6-412d-afe5-2b9c2d58dc21 service nova] Lock "5b73cd44-6c89-4e12-9195-16b5172cbf2c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.472986] env[69994]: DEBUG oslo_concurrency.lockutils [req-5c3e27fa-9dbf-42d8-b4cb-093f20c8002c req-86395b8d-30a6-412d-afe5-2b9c2d58dc21 service nova] Lock "5b73cd44-6c89-4e12-9195-16b5172cbf2c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.472986] env[69994]: DEBUG nova.compute.manager [req-5c3e27fa-9dbf-42d8-b4cb-093f20c8002c req-86395b8d-30a6-412d-afe5-2b9c2d58dc21 service nova] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] No waiting events found dispatching network-vif-plugged-27c41d65-6213-4b24-b4b8-08a5175c26ef {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1105.472986] env[69994]: WARNING nova.compute.manager 
[req-5c3e27fa-9dbf-42d8-b4cb-093f20c8002c req-86395b8d-30a6-412d-afe5-2b9c2d58dc21 service nova] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Received unexpected event network-vif-plugged-27c41d65-6213-4b24-b4b8-08a5175c26ef for instance with vm_state building and task_state spawning. [ 1105.498622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.338s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.499166] env[69994]: DEBUG nova.compute.manager [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1105.501770] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.597s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.506330] env[69994]: INFO nova.compute.claims [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1105.560449] env[69994]: DEBUG nova.network.neutron [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Successfully updated port: 27c41d65-6213-4b24-b4b8-08a5175c26ef {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1105.695755] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1105.696086] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1105.696173] env[69994]: DEBUG nova.virt.hardware [None 
req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1105.696367] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1105.696871] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1105.696871] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1105.696871] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1105.696990] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1105.697142] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1105.697304] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1105.697476] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1105.703199] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1bbce6f-dae4-46ce-91a5-40699d84c32c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.719413] env[69994]: DEBUG oslo_vmware.api [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1105.719413] env[69994]: value = 
"task-2926177" [ 1105.719413] env[69994]: _type = "Task" [ 1105.719413] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.730042] env[69994]: DEBUG oslo_vmware.api [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926177, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.007846] env[69994]: DEBUG nova.compute.utils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1106.011821] env[69994]: DEBUG nova.compute.manager [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1106.011991] env[69994]: DEBUG nova.network.neutron [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1106.048998] env[69994]: DEBUG nova.policy [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fda1d6c9ff1741499a645d4c3b3a7a3d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99cb9732709047989856dae779a21f2e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1106.066845] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "refresh_cache-5b73cd44-6c89-4e12-9195-16b5172cbf2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.067037] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquired lock "refresh_cache-5b73cd44-6c89-4e12-9195-16b5172cbf2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.067467] env[69994]: DEBUG nova.network.neutron [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1106.230320] env[69994]: DEBUG oslo_vmware.api [None 
req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926177, 'name': ReconfigVM_Task, 'duration_secs': 0.194477} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.230637] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance 'f0b77732-aae1-4790-a2c7-75586e78eda6' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1106.321107] env[69994]: DEBUG nova.network.neutron [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Successfully created port: 8b5b8c81-eaad-48a6-840d-849dd8ea42b8 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1106.517582] env[69994]: DEBUG nova.compute.manager [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1106.622406] env[69994]: DEBUG nova.network.neutron [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1106.736598] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1106.736941] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1106.736994] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1106.737178] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1106.737323] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1106.737469] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1106.737667] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1106.737825] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1106.737997] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 
tempest-ServerActionsTestOtherB-1022362737-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1106.738221] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1106.738401] env[69994]: DEBUG nova.virt.hardware [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1106.744037] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Reconfiguring VM instance instance-0000003f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1106.746392] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b389bd3c-e61a-49e6-82ef-03c5579dd5d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.766275] env[69994]: DEBUG oslo_vmware.api [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1106.766275] env[69994]: value = "task-2926178" [ 1106.766275] env[69994]: _type = "Task" [ 1106.766275] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.779633] env[69994]: DEBUG oslo_vmware.api [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926178, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.826209] env[69994]: DEBUG nova.network.neutron [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Updating instance_info_cache with network_info: [{"id": "27c41d65-6213-4b24-b4b8-08a5175c26ef", "address": "fa:16:3e:81:35:41", "network": {"id": "ca055ef0-8a45-4457-a25c-226ccd592aa9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472062423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0af2d3f09d264d4c9bba8747f74383bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c41d65-62", "ovs_interfaceid": "27c41d65-6213-4b24-b4b8-08a5175c26ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.829516] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e6369b-0937-4e3e-9230-6339b89dac33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.837701] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f635a3-7566-4814-9f1d-9fabeec7f502 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.869399] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ce5950-dc29-4873-8d8a-b5a5b692b407 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.878314] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33bfd1c0-7c20-44c3-8855-da4cacf1636b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.894373] env[69994]: DEBUG nova.compute.provider_tree [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.276735] env[69994]: DEBUG oslo_vmware.api [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926178, 'name': ReconfigVM_Task, 'duration_secs': 0.167792} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.277040] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Reconfigured VM instance instance-0000003f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1107.277827] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d89933-ceea-478b-a4cc-7a8290729b8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.301043] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] f0b77732-aae1-4790-a2c7-75586e78eda6/f0b77732-aae1-4790-a2c7-75586e78eda6.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1107.301454] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ccca790-3484-477f-babe-8606ac8e59b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.320975] env[69994]: DEBUG oslo_vmware.api [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1107.320975] env[69994]: value = "task-2926179" [ 1107.320975] env[69994]: _type = "Task" [ 1107.320975] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.329148] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Releasing lock "refresh_cache-5b73cd44-6c89-4e12-9195-16b5172cbf2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.329442] env[69994]: DEBUG nova.compute.manager [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Instance network_info: |[{"id": "27c41d65-6213-4b24-b4b8-08a5175c26ef", "address": "fa:16:3e:81:35:41", "network": {"id": "ca055ef0-8a45-4457-a25c-226ccd592aa9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472062423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0af2d3f09d264d4c9bba8747f74383bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c41d65-62", "ovs_interfaceid": "27c41d65-6213-4b24-b4b8-08a5175c26ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1107.329704] env[69994]: DEBUG oslo_vmware.api [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926179, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.330052] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:35:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27c41d65-6213-4b24-b4b8-08a5175c26ef', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1107.337637] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1107.337777] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1107.337990] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8308f07a-a87b-40f3-9f6e-90275911415d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.359321] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1107.359321] env[69994]: value = "task-2926180" [ 1107.359321] env[69994]: _type = "Task" [ 1107.359321] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.367287] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926180, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.399480] env[69994]: DEBUG nova.scheduler.client.report [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1107.510433] env[69994]: DEBUG nova.compute.manager [req-46c4683f-2c5c-4029-b596-a2959ffa0a95 req-4ed17890-618c-4ea1-8aa2-2dc899223970 service nova] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Received event network-changed-27c41d65-6213-4b24-b4b8-08a5175c26ef {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1107.510433] env[69994]: DEBUG nova.compute.manager [req-46c4683f-2c5c-4029-b596-a2959ffa0a95 req-4ed17890-618c-4ea1-8aa2-2dc899223970 service nova] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Refreshing instance network info cache due to event network-changed-27c41d65-6213-4b24-b4b8-08a5175c26ef. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1107.510433] env[69994]: DEBUG oslo_concurrency.lockutils [req-46c4683f-2c5c-4029-b596-a2959ffa0a95 req-4ed17890-618c-4ea1-8aa2-2dc899223970 service nova] Acquiring lock "refresh_cache-5b73cd44-6c89-4e12-9195-16b5172cbf2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.510576] env[69994]: DEBUG oslo_concurrency.lockutils [req-46c4683f-2c5c-4029-b596-a2959ffa0a95 req-4ed17890-618c-4ea1-8aa2-2dc899223970 service nova] Acquired lock "refresh_cache-5b73cd44-6c89-4e12-9195-16b5172cbf2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.510747] env[69994]: DEBUG nova.network.neutron [req-46c4683f-2c5c-4029-b596-a2959ffa0a95 req-4ed17890-618c-4ea1-8aa2-2dc899223970 service nova] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Refreshing network info cache for port 27c41d65-6213-4b24-b4b8-08a5175c26ef {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1107.526568] env[69994]: DEBUG nova.compute.manager [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1107.552968] env[69994]: DEBUG nova.virt.hardware [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1107.553254] env[69994]: DEBUG nova.virt.hardware [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1107.553443] env[69994]: DEBUG nova.virt.hardware [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1107.553653] env[69994]: DEBUG nova.virt.hardware [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1107.553821] env[69994]: DEBUG nova.virt.hardware [None 
req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1107.554016] env[69994]: DEBUG nova.virt.hardware [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1107.554268] env[69994]: DEBUG nova.virt.hardware [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1107.554459] env[69994]: DEBUG nova.virt.hardware [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1107.554668] env[69994]: DEBUG nova.virt.hardware [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1107.554859] env[69994]: DEBUG nova.virt.hardware [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1107.555084] env[69994]: DEBUG nova.virt.hardware [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1107.556274] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23814ac0-6bab-4ed2-b922-a92393af6474 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.565735] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd67f4b-b05d-4435-adce-b62801ac0e9d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.832700] env[69994]: DEBUG oslo_vmware.api [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926179, 'name': ReconfigVM_Task, 'duration_secs': 0.427587} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.833398] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Reconfigured VM instance instance-0000003f to attach disk [datastore1] f0b77732-aae1-4790-a2c7-75586e78eda6/f0b77732-aae1-4790-a2c7-75586e78eda6.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1107.833994] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance 'f0b77732-aae1-4790-a2c7-75586e78eda6' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1107.857450] env[69994]: DEBUG nova.network.neutron [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Successfully updated port: 8b5b8c81-eaad-48a6-840d-849dd8ea42b8 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1107.871070] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926180, 'name': CreateVM_Task, 'duration_secs': 0.328778} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.871070] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1107.871070] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.871070] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.871070] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1107.871381] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c4659d0-7c8e-431a-be1f-b0b8c1a7ae98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.877717] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 
tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1107.877717] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bd8959-0409-f55a-f593-5289fdc35a71" [ 1107.877717] env[69994]: _type = "Task" [ 1107.877717] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.886656] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bd8959-0409-f55a-f593-5289fdc35a71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.904232] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.402s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.904747] env[69994]: DEBUG nova.compute.manager [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1107.907670] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 17.065s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.190717] env[69994]: DEBUG nova.network.neutron [req-46c4683f-2c5c-4029-b596-a2959ffa0a95 req-4ed17890-618c-4ea1-8aa2-2dc899223970 service nova] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Updated VIF entry in instance network info cache for port 27c41d65-6213-4b24-b4b8-08a5175c26ef. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1108.191095] env[69994]: DEBUG nova.network.neutron [req-46c4683f-2c5c-4029-b596-a2959ffa0a95 req-4ed17890-618c-4ea1-8aa2-2dc899223970 service nova] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Updating instance_info_cache with network_info: [{"id": "27c41d65-6213-4b24-b4b8-08a5175c26ef", "address": "fa:16:3e:81:35:41", "network": {"id": "ca055ef0-8a45-4457-a25c-226ccd592aa9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472062423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0af2d3f09d264d4c9bba8747f74383bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c41d65-62", "ovs_interfaceid": "27c41d65-6213-4b24-b4b8-08a5175c26ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.341028] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-773e3057-ebb3-4a36-8689-36e4f069462c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.360346] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquiring lock "refresh_cache-83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.360520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquired lock "refresh_cache-83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.360668] env[69994]: DEBUG nova.network.neutron [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1108.362476] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f150a83-8464-4cf9-99e5-2b779842a574 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.381957] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] 
Updating instance 'f0b77732-aae1-4790-a2c7-75586e78eda6' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1108.394806] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52bd8959-0409-f55a-f593-5289fdc35a71, 'name': SearchDatastore_Task, 'duration_secs': 0.01077} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.395113] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.396075] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1108.396075] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.396075] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.396075] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1108.396245] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed79b63e-4875-4c41-9660-f9abac175663 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.405322] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1108.405505] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1108.406243] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-030502e2-b5c4-4628-ba88-f3bc9b54a453 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.411061] env[69994]: DEBUG nova.compute.utils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1108.414270] env[69994]: INFO nova.compute.claims [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1108.418785] env[69994]: DEBUG nova.compute.manager [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1108.419363] env[69994]: DEBUG nova.network.neutron [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1108.421305] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1108.421305] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e64fa8-5e78-889b-c9f4-ecd6ccdd57ab" [ 1108.421305] env[69994]: _type = "Task" [ 1108.421305] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.434554] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e64fa8-5e78-889b-c9f4-ecd6ccdd57ab, 'name': SearchDatastore_Task, 'duration_secs': 0.00949} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.435508] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6727137e-04f1-44ef-b533-46514fe0c411 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.442032] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1108.442032] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c9d9d3-7633-d745-f92a-640a8b6b791b" [ 1108.442032] env[69994]: _type = "Task" [ 1108.442032] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.451118] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c9d9d3-7633-d745-f92a-640a8b6b791b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.460869] env[69994]: DEBUG nova.policy [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6bcafd04d09f45fab9d573d11d01dfbf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c324e22a0046460b9ad3ad8578f7ef6f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1108.693430] env[69994]: DEBUG oslo_concurrency.lockutils [req-46c4683f-2c5c-4029-b596-a2959ffa0a95 req-4ed17890-618c-4ea1-8aa2-2dc899223970 service nova] Releasing lock "refresh_cache-5b73cd44-6c89-4e12-9195-16b5172cbf2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.766470] env[69994]: DEBUG nova.network.neutron [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Successfully created port: 29cc87df-3c6e-45eb-a80d-5127f53062e1 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1108.895262] env[69994]: DEBUG nova.network.neutron [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1108.922023] env[69994]: DEBUG nova.compute.manager [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1108.924183] env[69994]: INFO nova.compute.resource_tracker [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating resource usage from migration 1f04407a-5664-4068-88a4-f8523ad869fc [ 1108.954490] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c9d9d3-7633-d745-f92a-640a8b6b791b, 'name': SearchDatastore_Task, 'duration_secs': 0.010564} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.954747] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.955014] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 5b73cd44-6c89-4e12-9195-16b5172cbf2c/5b73cd44-6c89-4e12-9195-16b5172cbf2c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1108.955293] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca98bf55-6128-4036-9098-4e1518493313 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.967220] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1108.967220] env[69994]: value = "task-2926181" [ 1108.967220] env[69994]: _type = "Task" [ 1108.967220] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.972881] env[69994]: DEBUG nova.network.neutron [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Port 0f0159af-9c04-46fe-8fac-ebd620726fd7 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1108.988095] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926181, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.077849] env[69994]: DEBUG nova.network.neutron [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Updating instance_info_cache with network_info: [{"id": "8b5b8c81-eaad-48a6-840d-849dd8ea42b8", "address": "fa:16:3e:22:b2:41", "network": {"id": "2f9ca5ad-077e-48f8-a832-45aad5cb5b58", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-268034529-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99cb9732709047989856dae779a21f2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b5b8c81-ea", "ovs_interfaceid": "8b5b8c81-eaad-48a6-840d-849dd8ea42b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.212584] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d1abc9-4f23-40d7-a390-ab73fc151c49 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.223037] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f943461-5604-4a52-bd88-223617d26fc6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.256840] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae5811a-cd99-4d51-a34a-95922b288c69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.265976] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19f2c0d-dd98-4fe5-98cf-e0211ff1618b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.282433] env[69994]: DEBUG nova.compute.provider_tree [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1109.485692] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926181, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.433033} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.486152] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 5b73cd44-6c89-4e12-9195-16b5172cbf2c/5b73cd44-6c89-4e12-9195-16b5172cbf2c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1109.486370] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1109.486648] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7509ac39-8591-4ad2-913c-e31de844a205 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.494101] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1109.494101] env[69994]: value = "task-2926182" [ 1109.494101] env[69994]: _type = "Task" [ 1109.494101] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.502534] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926182, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.536683] env[69994]: DEBUG nova.compute.manager [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Received event network-vif-plugged-8b5b8c81-eaad-48a6-840d-849dd8ea42b8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1109.536683] env[69994]: DEBUG oslo_concurrency.lockutils [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] Acquiring lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.536905] env[69994]: DEBUG oslo_concurrency.lockutils [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] Lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.537335] env[69994]: DEBUG oslo_concurrency.lockutils [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] Lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.537335] env[69994]: DEBUG nova.compute.manager [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] No waiting events found dispatching network-vif-plugged-8b5b8c81-eaad-48a6-840d-849dd8ea42b8 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1109.537550] env[69994]: WARNING nova.compute.manager [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Received unexpected event network-vif-plugged-8b5b8c81-eaad-48a6-840d-849dd8ea42b8 for instance with vm_state building and task_state spawning. [ 1109.537550] env[69994]: DEBUG nova.compute.manager [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Received event network-changed-8b5b8c81-eaad-48a6-840d-849dd8ea42b8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1109.537673] env[69994]: DEBUG nova.compute.manager [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Refreshing instance network info cache due to event network-changed-8b5b8c81-eaad-48a6-840d-849dd8ea42b8. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1109.537837] env[69994]: DEBUG oslo_concurrency.lockutils [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] Acquiring lock "refresh_cache-83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.580107] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Releasing lock "refresh_cache-83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.580107] env[69994]: DEBUG nova.compute.manager [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Instance network_info: |[{"id": "8b5b8c81-eaad-48a6-840d-849dd8ea42b8", "address": "fa:16:3e:22:b2:41", "network": {"id": "2f9ca5ad-077e-48f8-a832-45aad5cb5b58", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-268034529-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99cb9732709047989856dae779a21f2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b5b8c81-ea", "ovs_interfaceid": "8b5b8c81-eaad-48a6-840d-849dd8ea42b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1109.580295] env[69994]: DEBUG oslo_concurrency.lockutils [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] Acquired lock "refresh_cache-83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.580576] env[69994]: DEBUG nova.network.neutron [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Refreshing network info cache for port 8b5b8c81-eaad-48a6-840d-849dd8ea42b8 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1109.581909] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:b2:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1cbd5e0e-9116-46f1-9748-13a73d2d7e75', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'8b5b8c81-eaad-48a6-840d-849dd8ea42b8', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1109.589749] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Creating folder: Project (99cb9732709047989856dae779a21f2e). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1109.590517] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f97d76a4-b523-4e4e-aaa4-35f9d06324b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.604853] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Created folder: Project (99cb9732709047989856dae779a21f2e) in parent group-v587342. [ 1109.605102] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Creating folder: Instances. Parent ref: group-v587617. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1109.605451] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70ffedbd-4710-467b-9397-081aef1020c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.617036] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Created folder: Instances in parent group-v587617. [ 1109.617224] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1109.617460] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1109.617676] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e26bc401-15dd-4a38-9f61-5dce8c065990 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.639562] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1109.639562] env[69994]: value = "task-2926185" [ 1109.639562] env[69994]: _type = "Task" [ 1109.639562] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.648469] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926185, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.785472] env[69994]: DEBUG nova.scheduler.client.report [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1109.929995] env[69994]: DEBUG nova.compute.manager [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1109.956101] env[69994]: DEBUG nova.virt.hardware [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1109.956406] env[69994]: DEBUG nova.virt.hardware [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1109.956584] env[69994]: DEBUG nova.virt.hardware [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1109.956769] env[69994]: DEBUG nova.virt.hardware [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1109.956916] env[69994]: DEBUG nova.virt.hardware [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} 
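The entries above ("Flavor limits 0:0:0 ... Got 1 possible topologies ... Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") trace nova.virt.hardware selecting a guest CPU topology for the 1-vCPU m1.nano flavor: with no flavor or image limits or preferences, every sockets/cores/threads combination whose product equals the vCPU count is enumerated and the single candidate 1:1:1 wins. The short Python sketch below only illustrates that general idea under simplified assumptions; the Topology class, helper names, and the small enumeration caps are invented for illustration and are not Nova's actual _get_desirable_cpu_topologies implementation.

# Illustrative sketch (not Nova code): why a 1-vCPU flavor with no
# hw:cpu_* constraints ends up preferring sockets=1, cores=1, threads=1.
from dataclasses import dataclass
from itertools import product

@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int

    @property
    def vcpus(self) -> int:
        return self.sockets * self.cores * self.threads

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Enumerate every sockets/cores/threads combination whose product
    # equals the requested vCPU count (the "possible topologies" step).
    for s, c, t in product(range(1, max_sockets + 1),
                           range(1, max_cores + 1),
                           range(1, max_threads + 1)):
        topo = Topology(s, c, t)
        if topo.vcpus == vcpus:
            yield topo

def sort_by_preference(topologies, preferred):
    # Rank candidates so those matching a non-zero preferred count come
    # first; a preference of 0 (logged as "pref 0:0:0") means no preference.
    def score(t):
        return sum((preferred.sockets and t.sockets == preferred.sockets,
                    preferred.cores and t.cores == preferred.cores,
                    preferred.threads and t.threads == preferred.threads))
    return sorted(topologies, key=score, reverse=True)

if __name__ == "__main__":
    # m1.nano: 1 vCPU, no limits/preferences. The real trace uses a 65536
    # ceiling per dimension; small caps keep this sketch fast to run.
    preferred = Topology(sockets=0, cores=0, threads=0)
    candidates = list(possible_topologies(vcpus=1, max_sockets=8,
                                          max_cores=8, max_threads=2))
    print(sort_by_preference(candidates, preferred))
    # -> [Topology(sockets=1, cores=1, threads=1)], matching the sorted
    #    desired topology reported in the log above.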
[ 1109.957078] env[69994]: DEBUG nova.virt.hardware [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1109.957288] env[69994]: DEBUG nova.virt.hardware [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1109.957447] env[69994]: DEBUG nova.virt.hardware [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1109.957613] env[69994]: DEBUG nova.virt.hardware [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1109.957778] env[69994]: DEBUG nova.virt.hardware [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1109.957951] env[69994]: DEBUG nova.virt.hardware [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1109.958871] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aade8f1-5c96-464f-aaae-3d11c30d2449 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.967155] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9890c7a8-deeb-426f-9877-239d2e514c3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.003684] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "f0b77732-aae1-4790-a2c7-75586e78eda6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.003796] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.003940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.011064] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926182, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.267371} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.011870] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1110.012539] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d16b68-85f9-4388-9cd2-f20c60fcdd0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.035679] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 5b73cd44-6c89-4e12-9195-16b5172cbf2c/5b73cd44-6c89-4e12-9195-16b5172cbf2c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1110.036811] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd8b4645-735e-45c4-9ff5-ad2605639243 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.057670] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1110.057670] env[69994]: value = "task-2926186" [ 1110.057670] env[69994]: _type = "Task" [ 1110.057670] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.065991] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926186, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.152267] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926185, 'name': CreateVM_Task, 'duration_secs': 0.410501} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.153031] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1110.153410] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.153410] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.153525] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1110.153797] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1835a32-ddee-4554-892c-335d8d07f077 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.158663] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Waiting for the task: (returnval){ [ 1110.158663] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527140ac-fa51-e5ab-99b9-8bac8d251554" [ 1110.158663] env[69994]: _type = "Task" [ 1110.158663] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.168829] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527140ac-fa51-e5ab-99b9-8bac8d251554, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.293042] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.385s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.293326] env[69994]: INFO nova.compute.manager [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Migrating [ 1110.304030] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.452s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.305837] env[69994]: INFO nova.compute.claims [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1110.386545] env[69994]: DEBUG nova.network.neutron [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Updated VIF entry in instance network info cache for port 8b5b8c81-eaad-48a6-840d-849dd8ea42b8. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1110.387026] env[69994]: DEBUG nova.network.neutron [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Updating instance_info_cache with network_info: [{"id": "8b5b8c81-eaad-48a6-840d-849dd8ea42b8", "address": "fa:16:3e:22:b2:41", "network": {"id": "2f9ca5ad-077e-48f8-a832-45aad5cb5b58", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-268034529-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99cb9732709047989856dae779a21f2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b5b8c81-ea", "ovs_interfaceid": "8b5b8c81-eaad-48a6-840d-849dd8ea42b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.435793] env[69994]: DEBUG nova.network.neutron [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Successfully updated port: 29cc87df-3c6e-45eb-a80d-5127f53062e1 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1110.571151] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926186, 'name': ReconfigVM_Task, 'duration_secs': 0.300487} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.571510] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 5b73cd44-6c89-4e12-9195-16b5172cbf2c/5b73cd44-6c89-4e12-9195-16b5172cbf2c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1110.572161] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6b75043-437f-40c9-baf2-64c956ed377c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.579542] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1110.579542] env[69994]: value = "task-2926187" [ 1110.579542] env[69994]: _type = "Task" [ 1110.579542] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.589056] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926187, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.670110] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527140ac-fa51-e5ab-99b9-8bac8d251554, 'name': SearchDatastore_Task, 'duration_secs': 0.011404} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.670515] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.670687] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1110.671517] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.671517] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.671517] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1110.671631] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-763b9e9d-f15e-44ee-a703-b4d7795df29d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.681028] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1110.681221] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1110.681959] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca9e1b07-fcb2-404f-b70a-4f4ccf45b75e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.687220] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Waiting for the task: (returnval){ [ 1110.687220] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5261c188-bae7-693c-b5f5-a441a5bf62a9" [ 1110.687220] env[69994]: _type = "Task" [ 1110.687220] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.695891] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5261c188-bae7-693c-b5f5-a441a5bf62a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.822699] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.822963] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.823176] env[69994]: DEBUG nova.network.neutron [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1110.890173] env[69994]: DEBUG oslo_concurrency.lockutils [req-7111473d-6089-4beb-abdd-629440682582 req-bb3aa2d9-90cb-421b-b13d-178ef9358f90 service nova] Releasing lock "refresh_cache-83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.938608] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "refresh_cache-60f6d502-0fef-4764-8c1f-1b1d5ab3db41" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.938871] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "refresh_cache-60f6d502-0fef-4764-8c1f-1b1d5ab3db41" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.938933] env[69994]: DEBUG nova.network.neutron [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1111.038314] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.038558] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.038746] env[69994]: DEBUG nova.network.neutron [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1111.090149] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926187, 'name': Rename_Task, 'duration_secs': 0.16074} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.090504] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1111.090756] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee7d2f42-4225-49ab-bd29-02ae9b057a4d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.097601] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1111.097601] env[69994]: value = "task-2926188" [ 1111.097601] env[69994]: _type = "Task" [ 1111.097601] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.105850] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926188, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.198320] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5261c188-bae7-693c-b5f5-a441a5bf62a9, 'name': SearchDatastore_Task, 'duration_secs': 0.01286} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.199138] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef748fa2-d6ea-4b34-bcd2-234f79fc3442 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.204915] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Waiting for the task: (returnval){ [ 1111.204915] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52964be6-58b6-20bc-9ad2-bd660a2d8774" [ 1111.204915] env[69994]: _type = "Task" [ 1111.204915] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.213119] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52964be6-58b6-20bc-9ad2-bd660a2d8774, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.476661] env[69994]: DEBUG nova.network.neutron [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1111.568322] env[69994]: DEBUG nova.compute.manager [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Received event network-vif-plugged-29cc87df-3c6e-45eb-a80d-5127f53062e1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1111.569966] env[69994]: DEBUG oslo_concurrency.lockutils [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] Acquiring lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.569966] env[69994]: DEBUG oslo_concurrency.lockutils [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.569966] env[69994]: DEBUG oslo_concurrency.lockutils [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.569966] env[69994]: DEBUG nova.compute.manager [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] No waiting events found dispatching network-vif-plugged-29cc87df-3c6e-45eb-a80d-5127f53062e1 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1111.569966] env[69994]: WARNING nova.compute.manager [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Received unexpected event network-vif-plugged-29cc87df-3c6e-45eb-a80d-5127f53062e1 for instance with vm_state building and task_state spawning. [ 1111.569966] env[69994]: DEBUG nova.compute.manager [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Received event network-changed-29cc87df-3c6e-45eb-a80d-5127f53062e1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1111.569966] env[69994]: DEBUG nova.compute.manager [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Refreshing instance network info cache due to event network-changed-29cc87df-3c6e-45eb-a80d-5127f53062e1. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1111.569966] env[69994]: DEBUG oslo_concurrency.lockutils [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] Acquiring lock "refresh_cache-60f6d502-0fef-4764-8c1f-1b1d5ab3db41" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.607590] env[69994]: DEBUG oslo_vmware.api [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926188, 'name': PowerOnVM_Task, 'duration_secs': 0.467543} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.610098] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1111.610327] env[69994]: INFO nova.compute.manager [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Took 6.42 seconds to spawn the instance on the hypervisor. [ 1111.610525] env[69994]: DEBUG nova.compute.manager [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1111.611496] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed9f902-40fe-45f9-b568-83e5fb0e4fdd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.622346] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e896fb-9745-424f-a1f5-8607c73aef89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.625376] env[69994]: DEBUG nova.network.neutron [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Updating instance_info_cache with network_info: [{"id": "29cc87df-3c6e-45eb-a80d-5127f53062e1", "address": "fa:16:3e:61:2f:ee", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", 
"segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29cc87df-3c", "ovs_interfaceid": "29cc87df-3c6e-45eb-a80d-5127f53062e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.642112] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d18a0d-d460-4e9d-b530-11bf33dae7e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.678266] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61654e6e-009d-4883-9ffa-6e3d99cbe629 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.687311] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ff0c93-10e6-4f55-b3ca-82ce3609f66e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.702993] env[69994]: DEBUG nova.compute.provider_tree [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1111.717461] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52964be6-58b6-20bc-9ad2-bd660a2d8774, 'name': SearchDatastore_Task, 'duration_secs': 0.01145} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.718467] env[69994]: DEBUG nova.network.neutron [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating instance_info_cache with network_info: [{"id": "cd532a29-1d4e-4026-89d2-9ef034f808a1", "address": "fa:16:3e:0f:cb:2f", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd532a29-1d", "ovs_interfaceid": "cd532a29-1d4e-4026-89d2-9ef034f808a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.719552] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.719810] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a/83a6beb7-5e26-4d90-87c3-28e4f8f1e34a.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1111.720092] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-180916c9-1f28-407c-96cb-a3c9816058d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.730797] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Waiting for the task: (returnval){ [ 1111.730797] env[69994]: value = "task-2926189" [ 1111.730797] env[69994]: _type = "Task" [ 1111.730797] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.739113] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926189, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.827949] env[69994]: DEBUG nova.network.neutron [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance_info_cache with network_info: [{"id": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "address": "fa:16:3e:fd:73:3a", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f0159af-9c", "ovs_interfaceid": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.129153] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "refresh_cache-60f6d502-0fef-4764-8c1f-1b1d5ab3db41" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.129489] env[69994]: DEBUG nova.compute.manager [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Instance network_info: |[{"id": "29cc87df-3c6e-45eb-a80d-5127f53062e1", "address": "fa:16:3e:61:2f:ee", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29cc87df-3c", "ovs_interfaceid": "29cc87df-3c6e-45eb-a80d-5127f53062e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1112.130195] env[69994]: INFO nova.compute.manager [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Took 25.74 seconds to build instance. [ 1112.131017] env[69994]: DEBUG oslo_concurrency.lockutils [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] Acquired lock "refresh_cache-60f6d502-0fef-4764-8c1f-1b1d5ab3db41" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.131161] env[69994]: DEBUG nova.network.neutron [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Refreshing network info cache for port 29cc87df-3c6e-45eb-a80d-5127f53062e1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1112.132434] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:2f:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29cc87df-3c6e-45eb-a80d-5127f53062e1', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1112.140379] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1112.143807] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1112.144740] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93e15f4c-14ed-48bd-87f1-7fc7fd26258b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.167319] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1112.167319] env[69994]: value = "task-2926190" [ 1112.167319] env[69994]: _type = "Task" [ 1112.167319] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.177241] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926190, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.210106] env[69994]: DEBUG nova.scheduler.client.report [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1112.223583] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.241147] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926189, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.427297} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.243704] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a/83a6beb7-5e26-4d90-87c3-28e4f8f1e34a.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1112.244127] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1112.244745] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-954e8be8-526b-4ebd-b8a7-88cd4ed540de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.253296] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Waiting for the task: (returnval){ [ 1112.253296] env[69994]: value = "task-2926191" [ 1112.253296] env[69994]: _type = "Task" [ 1112.253296] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.263746] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926191, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.331315] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.397780] env[69994]: DEBUG nova.network.neutron [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Updated VIF entry in instance network info cache for port 29cc87df-3c6e-45eb-a80d-5127f53062e1. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1112.398240] env[69994]: DEBUG nova.network.neutron [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Updating instance_info_cache with network_info: [{"id": "29cc87df-3c6e-45eb-a80d-5127f53062e1", "address": "fa:16:3e:61:2f:ee", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29cc87df-3c", "ovs_interfaceid": "29cc87df-3c6e-45eb-a80d-5127f53062e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.641962] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57e050d9-df9e-4fd1-887f-c78b0099342f tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "5b73cd44-6c89-4e12-9195-16b5172cbf2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.268s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.678410] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926190, 'name': CreateVM_Task, 'duration_secs': 0.412043} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.678619] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1112.679364] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.679530] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.679827] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1112.680366] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87e4c74e-e6ce-4ab6-a863-83ab473ead40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.685605] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1112.685605] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b91428-3cc4-226c-becb-4c43b80b58e9" [ 1112.685605] env[69994]: _type = "Task" [ 1112.685605] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.693486] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b91428-3cc4-226c-becb-4c43b80b58e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.714302] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.714830] env[69994]: DEBUG nova.compute.manager [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1112.718257] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.957s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.718257] env[69994]: DEBUG nova.objects.instance [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'resources' on Instance uuid d5af7ae1-d68e-4170-b762-e56d7f2551d7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1112.764935] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926191, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067726} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.765230] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1112.766031] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cbb44b9-399d-492d-b14f-0581bbae31a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.792379] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a/83a6beb7-5e26-4d90-87c3-28e4f8f1e34a.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1112.795546] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fd13026-ad73-4dbe-9f71-7fae7efa9a35 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.816696] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Waiting for the task: (returnval){ [ 1112.816696] env[69994]: value = "task-2926192" [ 1112.816696] env[69994]: _type = "Task" [ 1112.816696] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.827897] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926192, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.855625] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19c3697-a68a-4764-95b6-fe3df721a875 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.874741] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc049875-2899-4814-9874-bff5ebc0eebe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.882338] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance 'f0b77732-aae1-4790-a2c7-75586e78eda6' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1112.902696] env[69994]: DEBUG oslo_concurrency.lockutils [req-77f28576-f4a7-43e5-8e87-be90d6ba6c50 req-a25d4b7c-3dfd-40df-8697-976a9d6d2f09 service nova] Releasing lock "refresh_cache-60f6d502-0fef-4764-8c1f-1b1d5ab3db41" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.197541] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b91428-3cc4-226c-becb-4c43b80b58e9, 'name': SearchDatastore_Task, 'duration_secs': 0.048077} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.197932] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.198117] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1113.198421] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.198589] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.198773] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1113.199048] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4a637ca-aba8-48f4-b90f-ea9436a2e7c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.201052] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "5b73cd44-6c89-4e12-9195-16b5172cbf2c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.201291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "5b73cd44-6c89-4e12-9195-16b5172cbf2c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.201500] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "5b73cd44-6c89-4e12-9195-16b5172cbf2c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.201689] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "5b73cd44-6c89-4e12-9195-16b5172cbf2c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.201855] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "5b73cd44-6c89-4e12-9195-16b5172cbf2c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.203896] env[69994]: INFO nova.compute.manager [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Terminating instance [ 1113.213528] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1113.213717] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1113.214926] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faaf79bd-3d3e-4c35-ae3b-d7535e7dbd63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.221323] env[69994]: DEBUG nova.compute.utils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1113.226553] env[69994]: DEBUG nova.compute.manager [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1113.226821] env[69994]: DEBUG nova.network.neutron [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1113.228772] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1113.228772] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ad7e67-93f6-6196-94b3-2da8c89640a7" [ 1113.228772] env[69994]: _type = "Task" [ 1113.228772] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.243088] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ad7e67-93f6-6196-94b3-2da8c89640a7, 'name': SearchDatastore_Task, 'duration_secs': 0.011169} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.243884] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1236ae7a-90ba-4d1d-a93c-1daacb1e13a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.252885] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1113.252885] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52aeb7a8-c28a-e947-74e0-0135393d71d5" [ 1113.252885] env[69994]: _type = "Task" [ 1113.252885] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.263900] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52aeb7a8-c28a-e947-74e0-0135393d71d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.280045] env[69994]: DEBUG nova.policy [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de3fba71299348fab70f6e21e1028bb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0f5bb040f474df19739d5170639ff67', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1113.328286] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926192, 'name': ReconfigVM_Task, 'duration_secs': 0.297112} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.330897] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a/83a6beb7-5e26-4d90-87c3-28e4f8f1e34a.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1113.331747] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-745ffe86-e0b3-4c23-b362-629eafd5a9b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.339898] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Waiting for the task: (returnval){ [ 1113.339898] env[69994]: value = "task-2926193" [ 1113.339898] env[69994]: _type = "Task" [ 1113.339898] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.351256] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926193, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.388180] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed5bede-4f47-4bcd-af12-f0a63c24a5ee tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance 'f0b77732-aae1-4790-a2c7-75586e78eda6' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1113.475899] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2111cd-e1ed-4e53-819e-d8e84b2ca6d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.485028] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a82ecb9-c9dc-45a4-a910-0d05d406f34f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.518789] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be48d90f-d6dd-47d4-bb43-b45a22f9464d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.527279] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ec4cb5-5fbc-45c9-a933-9a6dd7c42dfe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.541521] env[69994]: DEBUG nova.compute.provider_tree [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1113.601612] env[69994]: DEBUG nova.network.neutron [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Successfully created port: 280e558a-fd50-4c1d-8524-0b37afc1e13e {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1113.707258] env[69994]: DEBUG nova.compute.manager [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1113.707522] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1113.708529] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea33afa-d152-4b1a-8c48-dbbd83b642bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.717687] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1113.717943] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59d5701b-e8d4-46ac-8d7e-ce622660a4ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.724977] env[69994]: DEBUG oslo_vmware.api [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1113.724977] env[69994]: value = "task-2926194" [ 1113.724977] env[69994]: _type = "Task" [ 1113.724977] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.729855] env[69994]: DEBUG nova.compute.manager [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1113.737729] env[69994]: DEBUG oslo_vmware.api [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926194, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.739105] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3365524f-235d-4767-abc1-cd090677d636 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.758028] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating instance 'c7c17fab-71a4-44df-907e-f7b408f80236' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1113.771546] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52aeb7a8-c28a-e947-74e0-0135393d71d5, 'name': SearchDatastore_Task, 'duration_secs': 0.010461} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.771887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.772121] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 60f6d502-0fef-4764-8c1f-1b1d5ab3db41/60f6d502-0fef-4764-8c1f-1b1d5ab3db41.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1113.772372] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b20c823-98f1-4e63-9c0a-88b98549f39c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.781931] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1113.781931] env[69994]: value = "task-2926195" [ 1113.781931] env[69994]: _type = "Task" [ 1113.781931] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.793900] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926195, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.850956] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926193, 'name': Rename_Task, 'duration_secs': 0.341222} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.851309] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1113.851568] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf874192-abb7-4eba-967c-27b1f1d07367 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.860344] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Waiting for the task: (returnval){ [ 1113.860344] env[69994]: value = "task-2926196" [ 1113.860344] env[69994]: _type = "Task" [ 1113.860344] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.870698] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926196, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.044430] env[69994]: DEBUG nova.scheduler.client.report [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1114.239248] env[69994]: DEBUG oslo_vmware.api [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926194, 'name': PowerOffVM_Task, 'duration_secs': 0.223201} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.242949] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1114.243196] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1114.243761] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aad68ce5-5b99-4192-9c60-7315bd50477f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.267160] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1114.267552] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5757aa2b-e2dd-4499-81d3-e30e41aec4f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.276375] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1114.276375] env[69994]: value = "task-2926198" [ 1114.276375] env[69994]: _type = "Task" [ 1114.276375] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.293770] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926198, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.300507] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926195, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516885} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.300865] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 60f6d502-0fef-4764-8c1f-1b1d5ab3db41/60f6d502-0fef-4764-8c1f-1b1d5ab3db41.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1114.301122] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1114.301784] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-487deaf7-a3db-4dd3-8dfa-320cc74f12ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.311360] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1114.311360] env[69994]: value = "task-2926199" [ 1114.311360] env[69994]: _type = "Task" [ 1114.311360] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.321577] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926199, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.335562] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1114.335981] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1114.336329] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Deleting the datastore file [datastore1] 5b73cd44-6c89-4e12-9195-16b5172cbf2c {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1114.336774] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8f87af0-4114-4b95-8ba0-79d39b4535c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.348383] env[69994]: DEBUG oslo_vmware.api [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1114.348383] env[69994]: value = "task-2926200" [ 1114.348383] env[69994]: _type = "Task" [ 1114.348383] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.360210] env[69994]: DEBUG oslo_vmware.api [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926200, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.372643] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926196, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.549477] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.831s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.554155] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.055s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.554155] env[69994]: DEBUG nova.objects.instance [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lazy-loading 'resources' on Instance uuid a7dd4e89-a953-49b4-b56f-fdacef3a621b {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1114.579196] env[69994]: INFO nova.scheduler.client.report [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Deleted allocations for instance d5af7ae1-d68e-4170-b762-e56d7f2551d7 [ 1114.745229] env[69994]: DEBUG nova.compute.manager [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1114.770832] env[69994]: DEBUG nova.virt.hardware [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1114.771099] env[69994]: DEBUG nova.virt.hardware [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1114.771330] env[69994]: DEBUG nova.virt.hardware [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1114.771444] env[69994]: DEBUG nova.virt.hardware [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1114.771591] env[69994]: DEBUG nova.virt.hardware [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1114.771735] env[69994]: DEBUG nova.virt.hardware [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1114.771942] env[69994]: DEBUG nova.virt.hardware [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1114.772117] env[69994]: DEBUG nova.virt.hardware [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1114.772287] env[69994]: DEBUG nova.virt.hardware [None 
req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1114.772450] env[69994]: DEBUG nova.virt.hardware [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1114.772624] env[69994]: DEBUG nova.virt.hardware [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1114.774095] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd348878-b42c-4b07-bfc8-db5620073ac7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.791040] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4624f6ba-3049-4a73-835c-30aef4c256fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.794748] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926198, 'name': PowerOffVM_Task, 'duration_secs': 0.304812} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.795278] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1114.795935] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating instance 'c7c17fab-71a4-44df-907e-f7b408f80236' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1114.820797] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926199, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095794} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.821083] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1114.821856] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98987457-3b40-4d42-a1f6-1f0185e87812 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.844590] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 60f6d502-0fef-4764-8c1f-1b1d5ab3db41/60f6d502-0fef-4764-8c1f-1b1d5ab3db41.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1114.845512] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec65bb4a-1510-4f2e-8183-2a6255a8140f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.868836] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1114.868836] env[69994]: value = "task-2926201" [ 1114.868836] env[69994]: _type = "Task" [ 1114.868836] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.871829] env[69994]: DEBUG oslo_vmware.api [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926200, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.302221} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.876910] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1114.877116] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1114.877295] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1114.877469] env[69994]: INFO nova.compute.manager [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1114.877696] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1114.877888] env[69994]: DEBUG oslo_vmware.api [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926196, 'name': PowerOnVM_Task, 'duration_secs': 0.646345} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.878355] env[69994]: DEBUG nova.compute.manager [-] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1114.878444] env[69994]: DEBUG nova.network.neutron [-] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1114.879982] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1114.880194] env[69994]: INFO nova.compute.manager [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Took 7.35 seconds to spawn the instance on the hypervisor. 
[ 1114.880402] env[69994]: DEBUG nova.compute.manager [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1114.881439] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426447d1-dc94-46e3-b085-96274f2be310 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.886982] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926201, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.089110] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aab337bb-1d43-4bfc-9b6f-63d6070b1775 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "d5af7ae1-d68e-4170-b762-e56d7f2551d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.072s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.263421] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "f0b77732-aae1-4790-a2c7-75586e78eda6" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.263707] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.264889] env[69994]: DEBUG nova.compute.manager [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Going to confirm migration 4 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1115.302385] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False 
{{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1115.302839] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1115.302839] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1115.302913] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1115.303048] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1115.303278] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1115.303396] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1115.303550] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1115.303728] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1115.303875] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1115.304217] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1115.310091] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd72f8f5-8a13-4c0f-8bc1-39996692c454 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.331050] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1115.331050] env[69994]: value = "task-2926202" [ 1115.331050] env[69994]: _type = "Task" [ 1115.331050] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.338997] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc49a0e-3f48-4cf6-9785-a1980d2e2276 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.350931] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9f95ac-ba58-4930-a3c4-2ab6981f9b57 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.355065] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926202, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.388545] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a869eb-05dc-4520-8a93-14cf90b1cf04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.401284] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926201, 'name': ReconfigVM_Task, 'duration_secs': 0.320759} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.406225] env[69994]: INFO nova.compute.manager [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Took 26.89 seconds to build instance. 
[ 1115.406620] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 60f6d502-0fef-4764-8c1f-1b1d5ab3db41/60f6d502-0fef-4764-8c1f-1b1d5ab3db41.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1115.407464] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0bc188cf-0641-4f31-b9ea-0247cf2d0965 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.410167] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70a93b2-f087-4a34-9aa2-6a0eda7c6b4a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.427170] env[69994]: DEBUG nova.compute.provider_tree [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1115.430219] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1115.430219] env[69994]: value = "task-2926203" [ 1115.430219] env[69994]: _type = "Task" [ 1115.430219] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.440445] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926203, 'name': Rename_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.500699] env[69994]: DEBUG nova.compute.manager [req-c01fb86e-3ef9-4035-a984-37b598eed8e6 req-7c5cd86f-bbfc-47cd-b7e1-485c46538a2f service nova] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Received event network-vif-plugged-280e558a-fd50-4c1d-8524-0b37afc1e13e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.500969] env[69994]: DEBUG oslo_concurrency.lockutils [req-c01fb86e-3ef9-4035-a984-37b598eed8e6 req-7c5cd86f-bbfc-47cd-b7e1-485c46538a2f service nova] Acquiring lock "85b58e95-04fd-45ff-ac60-d0167031e148-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.501284] env[69994]: DEBUG oslo_concurrency.lockutils [req-c01fb86e-3ef9-4035-a984-37b598eed8e6 req-7c5cd86f-bbfc-47cd-b7e1-485c46538a2f service nova] Lock "85b58e95-04fd-45ff-ac60-d0167031e148-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.501490] env[69994]: DEBUG oslo_concurrency.lockutils [req-c01fb86e-3ef9-4035-a984-37b598eed8e6 req-7c5cd86f-bbfc-47cd-b7e1-485c46538a2f service nova] Lock "85b58e95-04fd-45ff-ac60-d0167031e148-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.501682] env[69994]: DEBUG nova.compute.manager [req-c01fb86e-3ef9-4035-a984-37b598eed8e6 req-7c5cd86f-bbfc-47cd-b7e1-485c46538a2f service nova] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] No waiting events found dispatching network-vif-plugged-280e558a-fd50-4c1d-8524-0b37afc1e13e {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1115.501866] env[69994]: WARNING nova.compute.manager [req-c01fb86e-3ef9-4035-a984-37b598eed8e6 req-7c5cd86f-bbfc-47cd-b7e1-485c46538a2f service nova] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Received unexpected event network-vif-plugged-280e558a-fd50-4c1d-8524-0b37afc1e13e for instance with vm_state building and task_state spawning. 
[ 1115.623880] env[69994]: DEBUG nova.network.neutron [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Successfully updated port: 280e558a-fd50-4c1d-8524-0b37afc1e13e {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1115.821096] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.821303] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.821521] env[69994]: DEBUG nova.network.neutron [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1115.821703] env[69994]: DEBUG nova.objects.instance [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'info_cache' on Instance uuid f0b77732-aae1-4790-a2c7-75586e78eda6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.844064] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926202, 'name': ReconfigVM_Task, 'duration_secs': 0.260234} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.845072] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating instance 'c7c17fab-71a4-44df-907e-f7b408f80236' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1115.907821] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc43f2e8-404e-490a-a95f-9d780fc5fcac tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.410s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.931741] env[69994]: DEBUG nova.scheduler.client.report [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1115.945915] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926203, 'name': Rename_Task, 'duration_secs': 0.159572} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.946195] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1115.946435] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e75a93bb-f3b3-4252-83f7-5f460c8e3b4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.954399] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1115.954399] env[69994]: value = "task-2926204" [ 1115.954399] env[69994]: _type = "Task" [ 1115.954399] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.967028] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926204, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.014369] env[69994]: DEBUG nova.network.neutron [-] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.126367] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "refresh_cache-85b58e95-04fd-45ff-ac60-d0167031e148" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.126527] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "refresh_cache-85b58e95-04fd-45ff-ac60-d0167031e148" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.126706] env[69994]: DEBUG nova.network.neutron [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1116.356424] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1116.356743] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1116.357825] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1116.357825] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1116.357825] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image pref 0:0:0 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1116.357825] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1116.357825] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1116.358113] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1116.358113] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1116.358312] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1116.358527] env[69994]: DEBUG nova.virt.hardware [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1116.365640] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Reconfiguring VM instance instance-0000005f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1116.366022] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16371d51-7691-4eb0-8cb8-cb6ea69c7335 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.390121] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1116.390121] env[69994]: value = "task-2926205" [ 1116.390121] env[69994]: _type = "Task" [ 1116.390121] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.405684] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926205, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.440385] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.443055] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.982s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.444739] env[69994]: INFO nova.compute.claims [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1116.465258] env[69994]: DEBUG oslo_vmware.api [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926204, 'name': PowerOnVM_Task, 'duration_secs': 0.4968} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.465499] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1116.465716] env[69994]: INFO nova.compute.manager [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Took 6.54 seconds to spawn the instance on the hypervisor. 
[ 1116.465907] env[69994]: DEBUG nova.compute.manager [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1116.466752] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1101ad50-b473-4765-b1f7-765c98ec4fcf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.470252] env[69994]: INFO nova.scheduler.client.report [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted allocations for instance a7dd4e89-a953-49b4-b56f-fdacef3a621b [ 1116.471730] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquiring lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.471730] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.471847] env[69994]: INFO nova.compute.manager [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Rebooting instance [ 1116.519176] env[69994]: INFO nova.compute.manager [-] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Took 1.64 seconds to deallocate network for instance. [ 1116.671666] env[69994]: DEBUG nova.network.neutron [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1116.797015] env[69994]: DEBUG nova.network.neutron [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Updating instance_info_cache with network_info: [{"id": "280e558a-fd50-4c1d-8524-0b37afc1e13e", "address": "fa:16:3e:65:61:42", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap280e558a-fd", "ovs_interfaceid": "280e558a-fd50-4c1d-8524-0b37afc1e13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.903216] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926205, 'name': ReconfigVM_Task, 'duration_secs': 0.178362} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.903515] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Reconfigured VM instance instance-0000005f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1116.904318] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c45c719-e225-4a4f-8036-a06da93ee2ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.927087] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] c7c17fab-71a4-44df-907e-f7b408f80236/c7c17fab-71a4-44df-907e-f7b408f80236.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1116.927739] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-855700c4-8591-445f-852f-dc5e386f1011 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.952697] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1116.952697] env[69994]: value = "task-2926206" [ 1116.952697] env[69994]: _type = "Task" [ 1116.952697] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.962126] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926206, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.982557] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc166fbd-466f-4fae-bf24-8fb75748547d tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "a7dd4e89-a953-49b4-b56f-fdacef3a621b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.940s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.990229] env[69994]: INFO nova.compute.manager [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Took 27.10 seconds to build instance. 
[ 1117.004470] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquiring lock "refresh_cache-83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.004606] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquired lock "refresh_cache-83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.004783] env[69994]: DEBUG nova.network.neutron [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1117.024843] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.159604] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "87c5b8e4-166c-44b9-a179-1afaef751434" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.159780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "87c5b8e4-166c-44b9-a179-1afaef751434" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.183547] env[69994]: DEBUG nova.network.neutron [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance_info_cache with network_info: [{"id": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "address": "fa:16:3e:fd:73:3a", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f0159af-9c", "ovs_interfaceid": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.299880] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "refresh_cache-85b58e95-04fd-45ff-ac60-d0167031e148" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.300455] env[69994]: DEBUG nova.compute.manager [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Instance network_info: |[{"id": "280e558a-fd50-4c1d-8524-0b37afc1e13e", "address": "fa:16:3e:65:61:42", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap280e558a-fd", "ovs_interfaceid": "280e558a-fd50-4c1d-8524-0b37afc1e13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1117.300739] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:61:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '280e558a-fd50-4c1d-8524-0b37afc1e13e', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1117.309104] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1117.309375] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1117.309660] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fedd83ea-28a1-4514-a63d-c8aadff04f00 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.332367] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1117.332367] env[69994]: value = "task-2926207" [ 1117.332367] env[69994]: _type = "Task" [ 1117.332367] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.342408] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926207, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.471265] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926206, 'name': ReconfigVM_Task, 'duration_secs': 0.321927} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.471265] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Reconfigured VM instance instance-0000005f to attach disk [datastore2] c7c17fab-71a4-44df-907e-f7b408f80236/c7c17fab-71a4-44df-907e-f7b408f80236.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1117.471265] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating instance 'c7c17fab-71a4-44df-907e-f7b408f80236' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1117.492819] env[69994]: DEBUG oslo_concurrency.lockutils [None req-940357c6-1a89-433e-b84c-887cd1b14a26 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.615s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.534250] env[69994]: DEBUG nova.compute.manager [req-91fec023-f18c-44c7-9550-8398a4d70f2a req-0f11fd8a-5077-4841-862b-9892eee22801 service nova] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Received event network-vif-deleted-27c41d65-6213-4b24-b4b8-08a5175c26ef {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1117.534545] env[69994]: DEBUG nova.compute.manager [req-91fec023-f18c-44c7-9550-8398a4d70f2a req-0f11fd8a-5077-4841-862b-9892eee22801 service nova] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Received event 
network-changed-280e558a-fd50-4c1d-8524-0b37afc1e13e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1117.534659] env[69994]: DEBUG nova.compute.manager [req-91fec023-f18c-44c7-9550-8398a4d70f2a req-0f11fd8a-5077-4841-862b-9892eee22801 service nova] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Refreshing instance network info cache due to event network-changed-280e558a-fd50-4c1d-8524-0b37afc1e13e. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1117.534871] env[69994]: DEBUG oslo_concurrency.lockutils [req-91fec023-f18c-44c7-9550-8398a4d70f2a req-0f11fd8a-5077-4841-862b-9892eee22801 service nova] Acquiring lock "refresh_cache-85b58e95-04fd-45ff-ac60-d0167031e148" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.535033] env[69994]: DEBUG oslo_concurrency.lockutils [req-91fec023-f18c-44c7-9550-8398a4d70f2a req-0f11fd8a-5077-4841-862b-9892eee22801 service nova] Acquired lock "refresh_cache-85b58e95-04fd-45ff-ac60-d0167031e148" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.535185] env[69994]: DEBUG nova.network.neutron [req-91fec023-f18c-44c7-9550-8398a4d70f2a req-0f11fd8a-5077-4841-862b-9892eee22801 service nova] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Refreshing network info cache for port 280e558a-fd50-4c1d-8524-0b37afc1e13e {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1117.663742] env[69994]: DEBUG nova.compute.manager [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1117.688604] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.688879] env[69994]: DEBUG nova.objects.instance [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'migration_context' on Instance uuid f0b77732-aae1-4790-a2c7-75586e78eda6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1117.724345] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84059f0-941e-4a22-a408-5fb0114b01b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.733733] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb77d870-959b-4c5c-a967-a4768b038cd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.769128] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e34f5da-fd4e-42ee-b4e5-07ced5493ef2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.777986] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80d9c41-0dc6-4c3a-8005-89e1b45554e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.795960] env[69994]: DEBUG nova.compute.provider_tree [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.845489] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926207, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.933295] env[69994]: DEBUG nova.network.neutron [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Updating instance_info_cache with network_info: [{"id": "8b5b8c81-eaad-48a6-840d-849dd8ea42b8", "address": "fa:16:3e:22:b2:41", "network": {"id": "2f9ca5ad-077e-48f8-a832-45aad5cb5b58", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-268034529-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99cb9732709047989856dae779a21f2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b5b8c81-ea", "ovs_interfaceid": "8b5b8c81-eaad-48a6-840d-849dd8ea42b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.978067] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1f58d4-40b2-4672-b510-c1d7ae1b51fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.001019] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac42c86e-22c9-48b4-be79-6bbcae57171e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.020966] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating instance 'c7c17fab-71a4-44df-907e-f7b408f80236' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1118.192191] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.195059] env[69994]: DEBUG nova.objects.base [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1118.195948] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3d9430f9-cd7a-42fa-9927-f1b6edda6b50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.218185] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-637dea39-9142-42e6-bf7c-f5c94b0749cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.224267] env[69994]: DEBUG oslo_vmware.api [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1118.224267] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520c2f0e-f8b2-4d1a-f4cb-16e8e3ea0e45" [ 1118.224267] env[69994]: _type = "Task" [ 1118.224267] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.232497] env[69994]: DEBUG oslo_vmware.api [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520c2f0e-f8b2-4d1a-f4cb-16e8e3ea0e45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.251908] env[69994]: DEBUG nova.network.neutron [req-91fec023-f18c-44c7-9550-8398a4d70f2a req-0f11fd8a-5077-4841-862b-9892eee22801 service nova] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Updated VIF entry in instance network info cache for port 280e558a-fd50-4c1d-8524-0b37afc1e13e. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1118.252287] env[69994]: DEBUG nova.network.neutron [req-91fec023-f18c-44c7-9550-8398a4d70f2a req-0f11fd8a-5077-4841-862b-9892eee22801 service nova] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Updating instance_info_cache with network_info: [{"id": "280e558a-fd50-4c1d-8524-0b37afc1e13e", "address": "fa:16:3e:65:61:42", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap280e558a-fd", "ovs_interfaceid": "280e558a-fd50-4c1d-8524-0b37afc1e13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.299148] env[69994]: DEBUG nova.scheduler.client.report [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not 
changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1118.349703] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926207, 'name': CreateVM_Task, 'duration_secs': 0.543866} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.349831] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1118.350721] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.350900] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.351247] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1118.351519] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ffeb89a-1134-45f7-ba93-cb75e902fa5c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.357145] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1118.357145] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521aa330-72aa-f6a0-2682-abe07d6d2946" [ 1118.357145] env[69994]: _type = "Task" [ 1118.357145] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.365136] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521aa330-72aa-f6a0-2682-abe07d6d2946, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.436425] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Releasing lock "refresh_cache-83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.560815] env[69994]: DEBUG nova.network.neutron [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Port cd532a29-1d4e-4026-89d2-9ef034f808a1 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1118.608936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.609132] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.735402] env[69994]: DEBUG oslo_vmware.api [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520c2f0e-f8b2-4d1a-f4cb-16e8e3ea0e45, 'name': SearchDatastore_Task, 'duration_secs': 0.009552} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.735757] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.755151] env[69994]: DEBUG oslo_concurrency.lockutils [req-91fec023-f18c-44c7-9550-8398a4d70f2a req-0f11fd8a-5077-4841-862b-9892eee22801 service nova] Releasing lock "refresh_cache-85b58e95-04fd-45ff-ac60-d0167031e148" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.804366] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.805065] env[69994]: DEBUG nova.compute.manager [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1118.807672] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.783s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.808383] env[69994]: DEBUG nova.objects.instance [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lazy-loading 'resources' on Instance uuid 5b73cd44-6c89-4e12-9195-16b5172cbf2c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1118.868629] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521aa330-72aa-f6a0-2682-abe07d6d2946, 'name': SearchDatastore_Task, 'duration_secs': 0.009581} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.868938] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.869200] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1118.869459] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.869636] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.869823] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1118.870101] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ac6ca0a-6e72-49ab-b81c-e16e4b969e34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.880079] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1118.880382] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1118.881210] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4ae3727-23f2-4df4-92a1-5eca48bd352c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.887519] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1118.887519] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b0aaa8-b36a-8a9d-8e09-4ce2727a2106" [ 1118.887519] env[69994]: _type = "Task" [ 1118.887519] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.897178] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b0aaa8-b36a-8a9d-8e09-4ce2727a2106, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.941823] env[69994]: DEBUG nova.compute.manager [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1118.942740] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13decbef-ba7e-427a-8c2a-a7fe0cd9a047 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.111995] env[69994]: DEBUG nova.compute.manager [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1119.315221] env[69994]: DEBUG nova.compute.utils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1119.316869] env[69994]: DEBUG nova.compute.manager [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1119.317053] env[69994]: DEBUG nova.network.neutron [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1119.379670] env[69994]: DEBUG nova.policy [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56900b2a71cc423a868f3c1b81f70172', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2417f6585042417c95491eb3d7cba343', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1119.403162] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b0aaa8-b36a-8a9d-8e09-4ce2727a2106, 'name': SearchDatastore_Task, 'duration_secs': 0.013379} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.403977] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aca2cfe5-5d5e-4bed-9500-f5ce8f22cb98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.415196] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1119.415196] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5234670d-060e-6821-67da-9c6af4321b30" [ 1119.415196] env[69994]: _type = "Task" [ 1119.415196] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.426404] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5234670d-060e-6821-67da-9c6af4321b30, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.565853] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4787fc-b4d8-47a4-a7bf-a1c6d00066b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.580386] env[69994]: DEBUG nova.compute.manager [req-859a6258-5dd4-4050-8f8e-f7a7ced14d5a req-e06e050d-9167-49fd-b042-c27b8b1217f8 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Received event network-changed-29cc87df-3c6e-45eb-a80d-5127f53062e1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1119.580591] env[69994]: DEBUG nova.compute.manager [req-859a6258-5dd4-4050-8f8e-f7a7ced14d5a req-e06e050d-9167-49fd-b042-c27b8b1217f8 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Refreshing instance network info cache due to event network-changed-29cc87df-3c6e-45eb-a80d-5127f53062e1. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1119.580805] env[69994]: DEBUG oslo_concurrency.lockutils [req-859a6258-5dd4-4050-8f8e-f7a7ced14d5a req-e06e050d-9167-49fd-b042-c27b8b1217f8 service nova] Acquiring lock "refresh_cache-60f6d502-0fef-4764-8c1f-1b1d5ab3db41" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.580946] env[69994]: DEBUG oslo_concurrency.lockutils [req-859a6258-5dd4-4050-8f8e-f7a7ced14d5a req-e06e050d-9167-49fd-b042-c27b8b1217f8 service nova] Acquired lock "refresh_cache-60f6d502-0fef-4764-8c1f-1b1d5ab3db41" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1119.581119] env[69994]: DEBUG nova.network.neutron [req-859a6258-5dd4-4050-8f8e-f7a7ced14d5a req-e06e050d-9167-49fd-b042-c27b8b1217f8 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Refreshing network info cache for port 29cc87df-3c6e-45eb-a80d-5127f53062e1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1119.592298] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "c7c17fab-71a4-44df-907e-f7b408f80236-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.592298] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "c7c17fab-71a4-44df-907e-f7b408f80236-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.592298] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "c7c17fab-71a4-44df-907e-f7b408f80236-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1119.594867] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9068038b-edb9-4b53-988b-dbce2fb0e3e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.646143] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fdd6567-d524-43b3-8a1e-163a07f60b1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.654597] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77890257-cfa6-44c5-97bf-77f0705821d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.658933] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.673059] env[69994]: DEBUG nova.compute.provider_tree [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.683245] env[69994]: DEBUG nova.network.neutron [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Successfully created port: 8c872b8c-ae3b-4523-a7c5-2a3ed82baac3 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1119.820843] env[69994]: DEBUG nova.compute.manager [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1119.925602] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5234670d-060e-6821-67da-9c6af4321b30, 'name': SearchDatastore_Task, 'duration_secs': 0.010662} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.926145] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.926411] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 85b58e95-04fd-45ff-ac60-d0167031e148/85b58e95-04fd-45ff-ac60-d0167031e148.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1119.926676] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a1f2498-ede7-42f4-ba83-a9e25e1e4994 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.936599] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1119.936599] env[69994]: value = "task-2926208" [ 1119.936599] env[69994]: _type = "Task" [ 1119.936599] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.944381] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926208, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.961311] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a49a28-fa9b-45ea-abce-e02bbb3f5413 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.968041] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Doing hard reboot of VM {{(pid=69994) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1119.968349] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-0e736f88-24b7-4820-a559-653b1abfd539 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.974941] env[69994]: DEBUG oslo_vmware.api [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Waiting for the task: (returnval){ [ 1119.974941] env[69994]: value = "task-2926209" [ 1119.974941] env[69994]: _type = "Task" [ 1119.974941] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.982600] env[69994]: DEBUG oslo_vmware.api [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926209, 'name': ResetVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.175551] env[69994]: DEBUG nova.scheduler.client.report [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1120.344362] env[69994]: DEBUG nova.network.neutron [req-859a6258-5dd4-4050-8f8e-f7a7ced14d5a req-e06e050d-9167-49fd-b042-c27b8b1217f8 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Updated VIF entry in instance network info cache for port 29cc87df-3c6e-45eb-a80d-5127f53062e1. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1120.344871] env[69994]: DEBUG nova.network.neutron [req-859a6258-5dd4-4050-8f8e-f7a7ced14d5a req-e06e050d-9167-49fd-b042-c27b8b1217f8 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Updating instance_info_cache with network_info: [{"id": "29cc87df-3c6e-45eb-a80d-5127f53062e1", "address": "fa:16:3e:61:2f:ee", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29cc87df-3c", "ovs_interfaceid": "29cc87df-3c6e-45eb-a80d-5127f53062e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.446068] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926208, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456437} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.446341] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 85b58e95-04fd-45ff-ac60-d0167031e148/85b58e95-04fd-45ff-ac60-d0167031e148.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1120.446560] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1120.446803] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba5cdf8d-071e-4727-a415-dc8eb1998e4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.454399] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1120.454399] env[69994]: value = "task-2926210" [ 1120.454399] env[69994]: _type = "Task" [ 1120.454399] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.463400] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926210, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.485039] env[69994]: DEBUG oslo_vmware.api [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926209, 'name': ResetVM_Task, 'duration_secs': 0.101655} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.485039] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Did hard reboot of VM {{(pid=69994) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1120.485039] env[69994]: DEBUG nova.compute.manager [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1120.485827] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25309401-777f-4190-80d1-6ef15eca95b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.627454] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.627709] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.627844] env[69994]: DEBUG nova.network.neutron [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1120.680883] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.873s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.683250] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.491s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.685209] env[69994]: INFO nova.compute.claims [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1120.705125] env[69994]: INFO nova.scheduler.client.report [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 
tempest-ServersNegativeTestJSON-216427101-project-member] Deleted allocations for instance 5b73cd44-6c89-4e12-9195-16b5172cbf2c [ 1120.833324] env[69994]: DEBUG nova.compute.manager [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1120.852790] env[69994]: DEBUG oslo_concurrency.lockutils [req-859a6258-5dd4-4050-8f8e-f7a7ced14d5a req-e06e050d-9167-49fd-b042-c27b8b1217f8 service nova] Releasing lock "refresh_cache-60f6d502-0fef-4764-8c1f-1b1d5ab3db41" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.859116] env[69994]: DEBUG nova.virt.hardware [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1120.859346] env[69994]: DEBUG nova.virt.hardware [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1120.859646] env[69994]: DEBUG nova.virt.hardware [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1120.859855] env[69994]: DEBUG nova.virt.hardware [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1120.860012] env[69994]: DEBUG nova.virt.hardware [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1120.860173] env[69994]: DEBUG nova.virt.hardware [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1120.860435] env[69994]: DEBUG nova.virt.hardware [None 
req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1120.860601] env[69994]: DEBUG nova.virt.hardware [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1120.860765] env[69994]: DEBUG nova.virt.hardware [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1120.860924] env[69994]: DEBUG nova.virt.hardware [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1120.861109] env[69994]: DEBUG nova.virt.hardware [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1120.861967] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae4b5be-00b5-49d1-9b58-14a9e70c58d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.869801] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2e6ab9-1d9a-4f7c-9d52-ea9d1b359d3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.963499] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.230095} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.963641] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1120.964458] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0abfff-f179-464c-9266-850ac2c3ecc2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.988093] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 85b58e95-04fd-45ff-ac60-d0167031e148/85b58e95-04fd-45ff-ac60-d0167031e148.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1120.988406] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb147e14-1c51-448a-a800-8425318625e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.005728] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b99cac6a-b16a-432b-a3ef-c72018b69fda tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.534s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.009548] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1121.009548] env[69994]: value = "task-2926211" [ 1121.009548] env[69994]: _type = "Task" [ 1121.009548] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.019096] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926211, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.212665] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59342719-15cb-4d8e-9b7e-9a0df31835ed tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "5b73cd44-6c89-4e12-9195-16b5172cbf2c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.011s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.361741] env[69994]: DEBUG nova.network.neutron [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Successfully updated port: 8c872b8c-ae3b-4523-a7c5-2a3ed82baac3 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1121.414639] env[69994]: DEBUG nova.network.neutron [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating instance_info_cache with network_info: [{"id": "cd532a29-1d4e-4026-89d2-9ef034f808a1", "address": "fa:16:3e:0f:cb:2f", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd532a29-1d", "ovs_interfaceid": "cd532a29-1d4e-4026-89d2-9ef034f808a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.519188] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926211, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.595788] env[69994]: DEBUG nova.compute.manager [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Received event network-vif-plugged-8c872b8c-ae3b-4523-a7c5-2a3ed82baac3 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1121.596015] env[69994]: DEBUG oslo_concurrency.lockutils [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] Acquiring lock "29ea539a-d8f4-487b-b5e7-1f15534272f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.596225] env[69994]: DEBUG oslo_concurrency.lockutils [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.596396] env[69994]: DEBUG oslo_concurrency.lockutils [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.596562] env[69994]: DEBUG nova.compute.manager [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] No waiting events found dispatching network-vif-plugged-8c872b8c-ae3b-4523-a7c5-2a3ed82baac3 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1121.596814] env[69994]: WARNING nova.compute.manager [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Received unexpected event network-vif-plugged-8c872b8c-ae3b-4523-a7c5-2a3ed82baac3 for instance with vm_state building and task_state spawning. [ 1121.596998] env[69994]: DEBUG nova.compute.manager [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Received event network-changed-8c872b8c-ae3b-4523-a7c5-2a3ed82baac3 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1121.597209] env[69994]: DEBUG nova.compute.manager [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Refreshing instance network info cache due to event network-changed-8c872b8c-ae3b-4523-a7c5-2a3ed82baac3. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1121.597498] env[69994]: DEBUG oslo_concurrency.lockutils [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] Acquiring lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.597675] env[69994]: DEBUG oslo_concurrency.lockutils [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] Acquired lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.597853] env[69994]: DEBUG nova.network.neutron [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Refreshing network info cache for port 8c872b8c-ae3b-4523-a7c5-2a3ed82baac3 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1121.864651] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.917420] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.926899] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb36874-851f-48c6-9c24-4133c173cb17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.935059] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b16ade-5873-4450-b0e1-9fc67353b8bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.969888] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1682fff0-622d-48cb-b2a0-273dd378e9ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.978117] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790a7c4f-96f6-4b55-a248-dcf2abd180b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.993322] env[69994]: DEBUG nova.compute.provider_tree [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.020340] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': 
task-2926211, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.133846] env[69994]: DEBUG nova.network.neutron [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1122.223287] env[69994]: DEBUG nova.network.neutron [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.302577] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquiring lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.302858] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.303089] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquiring lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.303278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.303444] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.308187] env[69994]: INFO nova.compute.manager [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Terminating instance [ 1122.439165] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bd76371e-0d36-4374-aeec-b888640c1195 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.458990] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b76da9-7232-4e9c-8384-bbd4dbf701da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.465998] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating instance 'c7c17fab-71a4-44df-907e-f7b408f80236' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1122.496116] env[69994]: DEBUG nova.scheduler.client.report [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1122.520752] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926211, 'name': ReconfigVM_Task, 'duration_secs': 1.110351} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.521056] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 85b58e95-04fd-45ff-ac60-d0167031e148/85b58e95-04fd-45ff-ac60-d0167031e148.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1122.521815] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67564a8d-d70d-4ecc-b0d7-1a7a92c84816 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.528158] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1122.528158] env[69994]: value = "task-2926212" [ 1122.528158] env[69994]: _type = "Task" [ 1122.528158] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.536700] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926212, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.726106] env[69994]: DEBUG oslo_concurrency.lockutils [req-3adfaae1-a27f-4698-b997-999da3353488 req-906e81b6-35b0-4a35-8bb3-1c7f457b2534 service nova] Releasing lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.726525] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.726699] env[69994]: DEBUG nova.network.neutron [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1122.811359] env[69994]: DEBUG nova.compute.manager [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1122.812129] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1122.812503] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187a4e2a-f708-4c94-8b3d-896fbe3f481c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.819575] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1122.819818] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2286ffd5-3ee7-4ceb-8595-431ed5f7d88d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.826424] env[69994]: DEBUG oslo_vmware.api [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Waiting for the task: (returnval){ [ 1122.826424] env[69994]: value = "task-2926213" [ 1122.826424] env[69994]: _type = "Task" [ 1122.826424] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.520849] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1123.521527] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.838s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.521966] env[69994]: DEBUG nova.compute.manager [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1123.530158] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08f159ab-4404-410c-ab38-a4bf5cb84cc1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.531251] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.796s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.541149] env[69994]: DEBUG oslo_vmware.api [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926213, 'name': PowerOffVM_Task, 'duration_secs': 0.188103} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.548020] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1123.548020] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1123.548020] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926212, 'name': Rename_Task, 'duration_secs': 0.178378} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.548020] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1123.548020] env[69994]: value = "task-2926214" [ 1123.548020] env[69994]: _type = "Task" [ 1123.548020] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.548020] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12484de2-f1a9-4a66-9dba-338bd4c1b8e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.548620] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1123.548861] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-038cfaf1-5bc7-48e8-a353-8cc7431a84aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.563325] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926214, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.565226] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1123.565226] env[69994]: value = "task-2926216" [ 1123.565226] env[69994]: _type = "Task" [ 1123.565226] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.576248] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926216, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.580836] env[69994]: DEBUG nova.network.neutron [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1123.624349] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1123.624349] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1123.624349] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Deleting the datastore file [datastore1] 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1123.624349] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e25adc9b-41da-47f7-9ff3-99706a2fb476 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.635375] env[69994]: DEBUG oslo_vmware.api [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Waiting for the task: (returnval){ [ 1123.635375] env[69994]: value = "task-2926217" [ 1123.635375] env[69994]: _type = "Task" [ 1123.635375] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.648428] env[69994]: DEBUG oslo_vmware.api [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926217, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.743143] env[69994]: DEBUG nova.network.neutron [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance_info_cache with network_info: [{"id": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "address": "fa:16:3e:8a:06:07", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c872b8c-ae", "ovs_interfaceid": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.029758] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.029989] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.032401] env[69994]: DEBUG nova.compute.utils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1124.034354] env[69994]: DEBUG nova.compute.manager [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1124.034521] env[69994]: DEBUG nova.network.neutron [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1124.059219] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926214, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.075498] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926216, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.079661] env[69994]: DEBUG nova.policy [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9c7ff94bd744305a13df72dbf967c11', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66d57a69e0924b9abc2cc4e67fc8173c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1124.151114] env[69994]: DEBUG oslo_vmware.api [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Task: {'id': task-2926217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248525} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.151303] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1124.151694] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1124.151916] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1124.152183] env[69994]: INFO nova.compute.manager [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Took 1.34 seconds to destroy the instance on the hypervisor. [ 1124.152490] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1124.155884] env[69994]: DEBUG nova.compute.manager [-] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1124.155884] env[69994]: DEBUG nova.network.neutron [-] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1124.246113] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.246367] env[69994]: DEBUG nova.compute.manager [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Instance network_info: |[{"id": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "address": "fa:16:3e:8a:06:07", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c872b8c-ae", "ovs_interfaceid": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1124.247242] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:06:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c5652322-9f10-4996-baed-4c0aa13a1b4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c872b8c-ae3b-4523-a7c5-2a3ed82baac3', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1124.255774] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Creating folder: Project (2417f6585042417c95491eb3d7cba343). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1124.256241] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-247e944a-e7d0-4972-98ec-b66e0778406c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.265961] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Created folder: Project (2417f6585042417c95491eb3d7cba343) in parent group-v587342. [ 1124.266405] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Creating folder: Instances. Parent ref: group-v587622. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1124.266735] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c14065f-129d-4651-a829-657f05f76884 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.277014] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Created folder: Instances in parent group-v587622. [ 1124.277014] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1124.277014] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1124.277014] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29f29fbd-8dc5-4b87-919d-52ebbe7d7553 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.303022] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1124.303022] env[69994]: value = "task-2926220" [ 1124.303022] env[69994]: _type = "Task" [ 1124.303022] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.309351] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926220, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.313110] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3681f4f3-eab8-44aa-9e7d-762620b1ebad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.318164] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7fae40-2f1d-4c49-a10c-f6e3562595ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.349497] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ba96d0-c2fb-43e7-bcf8-e956f5c0678f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.364034] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4a599f-fc20-4a3b-b5aa-3f285f575500 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.380317] env[69994]: DEBUG nova.compute.provider_tree [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.535764] env[69994]: INFO nova.compute.manager [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Detaching volume dc4ef527-168d-4d24-a145-554ce6a61cad [ 1124.538394] env[69994]: DEBUG nova.compute.manager [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1124.559018] env[69994]: DEBUG oslo_vmware.api [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926214, 'name': PowerOnVM_Task, 'duration_secs': 0.58235} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.559138] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.559331] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b878cf69-691d-4a66-89d5-6a79b356f79a tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating instance 'c7c17fab-71a4-44df-907e-f7b408f80236' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1124.565197] env[69994]: DEBUG nova.network.neutron [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Successfully created port: be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1124.569994] env[69994]: DEBUG nova.compute.manager [req-6a964d75-2c11-4660-9d64-a00185f23818 req-40d8f0d3-00c1-4f8c-9523-fdad105790da service nova] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Received event network-vif-deleted-8b5b8c81-eaad-48a6-840d-849dd8ea42b8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1124.570507] env[69994]: INFO nova.compute.manager [req-6a964d75-2c11-4660-9d64-a00185f23818 req-40d8f0d3-00c1-4f8c-9523-fdad105790da service nova] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Neutron deleted interface 8b5b8c81-eaad-48a6-840d-849dd8ea42b8; detaching it from the instance and deleting it from the info cache [ 1124.570579] env[69994]: DEBUG nova.network.neutron [req-6a964d75-2c11-4660-9d64-a00185f23818 req-40d8f0d3-00c1-4f8c-9523-fdad105790da service nova] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.576847] env[69994]: INFO nova.virt.block_device [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Attempting to driver detach volume dc4ef527-168d-4d24-a145-554ce6a61cad from mountpoint /dev/sdb [ 1124.577105] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1124.577297] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587603', 'volume_id': 'dc4ef527-168d-4d24-a145-554ce6a61cad', 'name': 'volume-dc4ef527-168d-4d24-a145-554ce6a61cad', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '68eba44a-0989-47dc-a88b-102d9aa34c5d', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc4ef527-168d-4d24-a145-554ce6a61cad', 'serial': 'dc4ef527-168d-4d24-a145-554ce6a61cad'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1124.578117] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9a6b98-b0eb-45cf-aee5-69f8d205a713 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.586622] env[69994]: DEBUG oslo_vmware.api [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926216, 'name': PowerOnVM_Task, 'duration_secs': 0.676398} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.603063] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.603310] env[69994]: INFO nova.compute.manager [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Took 9.86 seconds to spawn the instance on the hypervisor. 
[ 1124.603493] env[69994]: DEBUG nova.compute.manager [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1124.604662] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d136ef35-ca4f-4eab-bad1-2f2cdf0a4a83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.608237] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e309b6ef-b854-4d72-84e9-4dce3fab4372 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.622693] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0e0915-4b17-475c-9264-77109809c95f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.644687] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d616664-605d-448d-b9da-57bd26cb4dbe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.660654] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] The volume has not been displaced from its original location: [datastore2] volume-dc4ef527-168d-4d24-a145-554ce6a61cad/volume-dc4ef527-168d-4d24-a145-554ce6a61cad.vmdk. No consolidation needed. {{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1124.668446] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Reconfiguring VM instance instance-00000052 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1124.668446] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-333793ff-e890-4339-9088-a9b09f5cd3d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.686528] env[69994]: DEBUG oslo_vmware.api [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1124.686528] env[69994]: value = "task-2926221" [ 1124.686528] env[69994]: _type = "Task" [ 1124.686528] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.697229] env[69994]: DEBUG oslo_vmware.api [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926221, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.814008] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926220, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.883805] env[69994]: DEBUG nova.scheduler.client.report [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1125.046798] env[69994]: DEBUG nova.network.neutron [-] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.072432] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-12a4b7b6-55df-4e90-ac94-f9d53f2ca01d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.082315] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15464a3a-7556-4760-be6b-90f55d24f036 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.114524] env[69994]: DEBUG nova.compute.manager [req-6a964d75-2c11-4660-9d64-a00185f23818 req-40d8f0d3-00c1-4f8c-9523-fdad105790da service nova] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Detach interface failed, port_id=8b5b8c81-eaad-48a6-840d-849dd8ea42b8, reason: Instance 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1125.130077] env[69994]: INFO nova.compute.manager [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Took 31.30 seconds to build instance. [ 1125.197326] env[69994]: DEBUG oslo_vmware.api [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926221, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.311831] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926220, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.549458] env[69994]: INFO nova.compute.manager [-] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Took 1.39 seconds to deallocate network for instance. [ 1125.550659] env[69994]: DEBUG nova.compute.manager [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1125.588133] env[69994]: DEBUG nova.virt.hardware [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1125.588420] env[69994]: DEBUG nova.virt.hardware [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1125.588655] env[69994]: DEBUG nova.virt.hardware [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1125.588815] env[69994]: DEBUG nova.virt.hardware [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1125.589080] env[69994]: DEBUG nova.virt.hardware [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1125.592470] env[69994]: DEBUG nova.virt.hardware [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1125.592470] env[69994]: DEBUG nova.virt.hardware [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1125.592470] env[69994]: DEBUG nova.virt.hardware [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1125.592470] 
env[69994]: DEBUG nova.virt.hardware [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1125.592470] env[69994]: DEBUG nova.virt.hardware [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1125.592470] env[69994]: DEBUG nova.virt.hardware [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1125.595843] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0118a6b4-8d65-47cf-a003-1f439cd2ee50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.606382] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73bb3a7d-5a6d-448e-a8ed-692973494429 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.632662] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e19057aa-723a-4264-b16c-6f27b707e647 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "85b58e95-04fd-45ff-ac60-d0167031e148" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.805s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.698589] env[69994]: DEBUG oslo_vmware.api [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926221, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.811811] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926220, 'name': CreateVM_Task, 'duration_secs': 1.480334} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.811989] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1125.813028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.813155] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.814048] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1125.814048] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea27748d-731c-43bb-8152-15081bd54aea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.817982] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1125.817982] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b731f4-a4b5-fe4d-49d9-261d210951c3" [ 1125.817982] env[69994]: _type = "Task" [ 1125.817982] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.826166] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b731f4-a4b5-fe4d-49d9-261d210951c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.895845] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.364s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.896116] env[69994]: DEBUG nova.compute.manager [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=69994) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1125.899088] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.240s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.900611] env[69994]: INFO nova.compute.claims [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.059870] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.182148] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f03aec8d-31d6-4df6-9c5a-d86d2a6787f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "85b58e95-04fd-45ff-ac60-d0167031e148" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.182437] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f03aec8d-31d6-4df6-9c5a-d86d2a6787f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "85b58e95-04fd-45ff-ac60-d0167031e148" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.182626] env[69994]: DEBUG nova.compute.manager [None req-f03aec8d-31d6-4df6-9c5a-d86d2a6787f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1126.183591] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb3483f-b694-450e-b549-f6d6e32a2292 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.198020] env[69994]: DEBUG nova.compute.manager [None req-f03aec8d-31d6-4df6-9c5a-d86d2a6787f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1126.198020] env[69994]: DEBUG nova.objects.instance [None req-f03aec8d-31d6-4df6-9c5a-d86d2a6787f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lazy-loading 'flavor' on Instance uuid 85b58e95-04fd-45ff-ac60-d0167031e148 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1126.203562] env[69994]: DEBUG 
oslo_vmware.api [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926221, 'name': ReconfigVM_Task, 'duration_secs': 1.30464} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.203767] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Reconfigured VM instance instance-00000052 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1126.210998] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa943b85-cbfd-480a-bba5-902ef25eef8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.223644] env[69994]: DEBUG nova.network.neutron [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Successfully updated port: be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1126.231841] env[69994]: DEBUG oslo_vmware.api [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1126.231841] env[69994]: value = "task-2926222" [ 1126.231841] env[69994]: _type = "Task" [ 1126.231841] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.243331] env[69994]: DEBUG oslo_vmware.api [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926222, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.334585] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b731f4-a4b5-fe4d-49d9-261d210951c3, 'name': SearchDatastore_Task, 'duration_secs': 0.027345} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.334585] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1126.334585] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1126.334585] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.334585] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.334585] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1126.334585] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-895c2394-6743-429b-a5ba-a85af1a1fe6e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.343547] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1126.344410] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1126.344783] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea5113ac-3fd8-48ec-b6d0-4200b39a2dfe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.351046] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1126.351046] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528d4c97-db2f-2796-6c71-5715f2cb33a9" [ 1126.351046] env[69994]: _type = "Task" [ 1126.351046] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.359848] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528d4c97-db2f-2796-6c71-5715f2cb33a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.457611] env[69994]: INFO nova.scheduler.client.report [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleted allocation for migration 740fc0de-ab50-4550-98f1-7276a9ece169 [ 1126.600054] env[69994]: DEBUG nova.compute.manager [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Received event network-vif-plugged-be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1126.600417] env[69994]: DEBUG oslo_concurrency.lockutils [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] Acquiring lock "87c5b8e4-166c-44b9-a179-1afaef751434-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.600603] env[69994]: DEBUG oslo_concurrency.lockutils [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] Lock "87c5b8e4-166c-44b9-a179-1afaef751434-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.600952] env[69994]: DEBUG oslo_concurrency.lockutils [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] Lock "87c5b8e4-166c-44b9-a179-1afaef751434-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.601193] env[69994]: DEBUG nova.compute.manager [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] No waiting events found dispatching network-vif-plugged-be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1126.601433] env[69994]: WARNING nova.compute.manager [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Received unexpected event network-vif-plugged-be3723ea-e18d-4908-bb9b-d8bbce5d3cee for instance with vm_state building and task_state spawning. [ 1126.602026] env[69994]: DEBUG nova.compute.manager [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Received event network-changed-be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1126.602026] env[69994]: DEBUG nova.compute.manager [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Refreshing instance network info cache due to event network-changed-be3723ea-e18d-4908-bb9b-d8bbce5d3cee. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1126.602234] env[69994]: DEBUG oslo_concurrency.lockutils [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] Acquiring lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.602312] env[69994]: DEBUG oslo_concurrency.lockutils [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] Acquired lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.602488] env[69994]: DEBUG nova.network.neutron [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Refreshing network info cache for port be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1126.726446] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.742621] env[69994]: DEBUG oslo_vmware.api [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926222, 'name': ReconfigVM_Task, 'duration_secs': 0.137464} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.742858] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587603', 'volume_id': 'dc4ef527-168d-4d24-a145-554ce6a61cad', 'name': 'volume-dc4ef527-168d-4d24-a145-554ce6a61cad', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '68eba44a-0989-47dc-a88b-102d9aa34c5d', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc4ef527-168d-4d24-a145-554ce6a61cad', 'serial': 'dc4ef527-168d-4d24-a145-554ce6a61cad'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1126.830217] env[69994]: DEBUG nova.objects.instance [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'flavor' on Instance uuid f0b77732-aae1-4790-a2c7-75586e78eda6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1126.865242] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528d4c97-db2f-2796-6c71-5715f2cb33a9, 'name': SearchDatastore_Task, 'duration_secs': 0.013496} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.865242] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf57ec2f-6f9b-41a2-91f9-ec26dea7750a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.869825] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1126.869825] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e39ef2-214b-fb47-4888-b52c75aefd19" [ 1126.869825] env[69994]: _type = "Task" [ 1126.869825] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.878981] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e39ef2-214b-fb47-4888-b52c75aefd19, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.964523] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb7727f2-05d9-441f-975e-6ee5ad6d8c42 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.701s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.123258] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77159a1-9354-4fde-ad13-855dcda18b59 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.130742] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd04a8a7-6142-4dad-b94b-21935757e57f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.134869] env[69994]: DEBUG nova.network.neutron [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1127.165266] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "c7c17fab-71a4-44df-907e-f7b408f80236" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.165667] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "c7c17fab-71a4-44df-907e-f7b408f80236" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.165928] env[69994]: DEBUG nova.compute.manager [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Going to confirm migration 5 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1127.168630] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d495d5e6-d624-485e-bd98-02143e979ba6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.180321] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03dfbf2-c2f9-4dfd-a15c-4c746ec8cb78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.197015] env[69994]: DEBUG nova.compute.provider_tree [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.202787] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f03aec8d-31d6-4df6-9c5a-d86d2a6787f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1127.203654] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef2f846f-75c0-4c7d-ad41-8e7342291723 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.209307] env[69994]: DEBUG nova.network.neutron [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.212695] env[69994]: DEBUG oslo_vmware.api [None req-f03aec8d-31d6-4df6-9c5a-d86d2a6787f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1127.212695] env[69994]: value = "task-2926223" [ 1127.212695] env[69994]: _type = "Task" [ 1127.212695] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.222193] env[69994]: DEBUG oslo_vmware.api [None req-f03aec8d-31d6-4df6-9c5a-d86d2a6787f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926223, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.287215] env[69994]: DEBUG nova.objects.instance [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'flavor' on Instance uuid 68eba44a-0989-47dc-a88b-102d9aa34c5d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.335262] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.335397] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.335667] env[69994]: DEBUG nova.network.neutron [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1127.335722] env[69994]: DEBUG nova.objects.instance [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 
tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'info_cache' on Instance uuid f0b77732-aae1-4790-a2c7-75586e78eda6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.380649] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e39ef2-214b-fb47-4888-b52c75aefd19, 'name': SearchDatastore_Task, 'duration_secs': 0.011468} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.380931] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.381217] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 29ea539a-d8f4-487b-b5e7-1f15534272f9/29ea539a-d8f4-487b-b5e7-1f15534272f9.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1127.381515] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4df5dc26-df0f-4ef2-82bf-639135b87e24 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.388421] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1127.388421] env[69994]: value = "task-2926224" [ 1127.388421] env[69994]: _type = "Task" [ 1127.388421] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.397189] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926224, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.700910] env[69994]: DEBUG nova.scheduler.client.report [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1127.706182] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.706361] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.706534] env[69994]: DEBUG nova.network.neutron [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1127.706716] env[69994]: DEBUG nova.objects.instance [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lazy-loading 'info_cache' on Instance uuid c7c17fab-71a4-44df-907e-f7b408f80236 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.711617] env[69994]: DEBUG oslo_concurrency.lockutils [req-1bd90371-e1ec-48d3-aa08-3bdc111a7e8f req-9b318b70-4ec8-4ae7-9c84-5bff7eb44411 service nova] Releasing lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.711967] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.712139] env[69994]: DEBUG nova.network.neutron [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1127.722307] env[69994]: DEBUG oslo_vmware.api [None req-f03aec8d-31d6-4df6-9c5a-d86d2a6787f7 
tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926223, 'name': PowerOffVM_Task, 'duration_secs': 0.208886} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.723142] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f03aec8d-31d6-4df6-9c5a-d86d2a6787f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1127.723342] env[69994]: DEBUG nova.compute.manager [None req-f03aec8d-31d6-4df6-9c5a-d86d2a6787f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1127.724471] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57b3ee7-06f0-4f4b-9c59-5dc543297c92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.839044] env[69994]: DEBUG nova.objects.base [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1127.901566] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926224, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.205543] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.306s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.206090] env[69994]: DEBUG nova.compute.manager [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1128.208750] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.149s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.208947] env[69994]: DEBUG nova.objects.instance [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Lazy-loading 'resources' on Instance uuid 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1128.239194] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f03aec8d-31d6-4df6-9c5a-d86d2a6787f7 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "85b58e95-04fd-45ff-ac60-d0167031e148" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.054s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.259847] env[69994]: DEBUG nova.network.neutron [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1128.295900] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc55ea62-ba66-40a8-a399-79720e122c7b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.265s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.399810] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926224, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.687732} completed successfully. 
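Note (illustrative): the recurring "Invoking <X>_Task ... progress is N% ... completed successfully" sequences, such as task-2926224 above, are oslo.vmware's asynchronous task handling: the driver invokes a vCenter *_Task method and then polls it to completion. A minimal sketch of that pattern, assuming a reachable vCenter and illustrative datastore paths; the remaining CopyVirtualDisk parameters (datacenter refs, copy spec) are omitted for brevity.

    from oslo_vmware import api

    # host, username, password, api_retry_count, task_poll_interval
    session = api.VMwareAPISession('vc.example.test', 'admin', 'secret', 10, 0.5)
    vim = session.vim
    disk_mgr = vim.service_content.virtualDiskManager
    task = session.invoke_api(vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName='[datastore2] cache/base.vmdk',
                              destName='[datastore2] inst/inst.vmdk')
    session.wait_for_task(task)  # polls progress until SUCCESS, raises on error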
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.400277] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 29ea539a-d8f4-487b-b5e7-1f15534272f9/29ea539a-d8f4-487b-b5e7-1f15534272f9.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1128.400660] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1128.401026] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-396e5094-a821-46fc-83ad-2137f1d19702 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.409098] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1128.409098] env[69994]: value = "task-2926225" [ 1128.409098] env[69994]: _type = "Task" [ 1128.409098] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.419291] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926225, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.577919] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "85b58e95-04fd-45ff-ac60-d0167031e148" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.578483] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "85b58e95-04fd-45ff-ac60-d0167031e148" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.578823] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "85b58e95-04fd-45ff-ac60-d0167031e148-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.579129] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "85b58e95-04fd-45ff-ac60-d0167031e148-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.579408] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "85b58e95-04fd-45ff-ac60-d0167031e148-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.586536] env[69994]: INFO nova.compute.manager [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Terminating instance [ 1128.665641] env[69994]: DEBUG oslo_concurrency.lockutils [None req-749091bc-8fb9-446a-bbdb-4d96343ca5a4 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.665930] env[69994]: DEBUG oslo_concurrency.lockutils [None req-749091bc-8fb9-446a-bbdb-4d96343ca5a4 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1128.666135] env[69994]: DEBUG nova.compute.manager [None req-749091bc-8fb9-446a-bbdb-4d96343ca5a4 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1128.667411] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb5e286-6ac2-425c-8d96-76fe28b4cf79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.674532] env[69994]: DEBUG nova.compute.manager [None req-749091bc-8fb9-446a-bbdb-4d96343ca5a4 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1128.675229] env[69994]: DEBUG nova.objects.instance [None req-749091bc-8fb9-446a-bbdb-4d96343ca5a4 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'flavor' on Instance uuid 68eba44a-0989-47dc-a88b-102d9aa34c5d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1128.715544] env[69994]: DEBUG nova.compute.utils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1128.719513] env[69994]: DEBUG nova.compute.manager [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1128.719799] env[69994]: DEBUG nova.network.neutron [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1128.798205] env[69994]: DEBUG nova.network.neutron [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updating instance_info_cache with network_info: [{"id": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "address": "fa:16:3e:3a:fd:a2", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3723ea-e1", "ovs_interfaceid": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.821544] env[69994]: DEBUG nova.policy [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4543702642614e079383389379629d8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0bbe936f4d284e73999846251269fefd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1128.922863] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926225, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06512} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.923154] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1128.923964] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27ebd7a-c7ac-4ddb-a888-5b351837ffab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.959278] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 29ea539a-d8f4-487b-b5e7-1f15534272f9/29ea539a-d8f4-487b-b5e7-1f15534272f9.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1128.962688] env[69994]: DEBUG nova.network.neutron [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance_info_cache with network_info: [{"id": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "address": "fa:16:3e:fd:73:3a", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f0159af-9c", "ovs_interfaceid": "0f0159af-9c04-46fe-8fac-ebd620726fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.964078] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2059559e-0a7d-46c3-ba33-51f5392e4909 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.990019] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1128.990019] env[69994]: value = "task-2926226" [ 1128.990019] env[69994]: _type = "Task" [ 1128.990019] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.995286] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1ad9b8-d290-4d05-998e-46525d0bba2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.001620] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926226, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.006960] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc5baf7-166d-4cef-87d6-342dc3309c4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.043816] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b47e63-a774-440b-aee9-56b0cb36d6e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.052282] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641e325c-2a60-473f-9533-14c0595e0289 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.068850] env[69994]: DEBUG nova.compute.provider_tree [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.092528] env[69994]: DEBUG nova.compute.manager [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1129.092838] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1129.093723] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fcdad38-c874-4af6-9ba9-9f164f7b4601 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.102243] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1129.102529] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6ef9db1-a31d-45e0-9dfb-2df5accb5faf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.158142] env[69994]: DEBUG nova.network.neutron [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating instance_info_cache with network_info: [{"id": "cd532a29-1d4e-4026-89d2-9ef034f808a1", "address": "fa:16:3e:0f:cb:2f", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd532a29-1d", "ovs_interfaceid": "cd532a29-1d4e-4026-89d2-9ef034f808a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.166272] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1129.166487] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Deleting contents of the VM from datastore datastore1 
{{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1129.166681] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleting the datastore file [datastore1] 85b58e95-04fd-45ff-ac60-d0167031e148 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1129.166991] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad8e4a7a-c476-4407-b3cf-30bfa175f80d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.174141] env[69994]: DEBUG oslo_vmware.api [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1129.174141] env[69994]: value = "task-2926228" [ 1129.174141] env[69994]: _type = "Task" [ 1129.174141] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.184844] env[69994]: DEBUG oslo_vmware.api [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.220522] env[69994]: DEBUG nova.compute.manager [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Start building block device mappings for instance. 
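Note (illustrative): the 85b58e95-04fd-45ff-ac60-d0167031e148 entries trace the VMware destroy path: power off, UnregisterVM, deletion of the instance's datastore directory, then Neutron deallocation. A compressed sketch of those vSphere calls via oslo.vmware; session, vm_ref and datacenter_ref are assumed to already exist, and error handling is omitted.

    vim = session.vim
    session.wait_for_task(session.invoke_api(vim, 'PowerOffVM_Task', vm_ref))
    session.invoke_api(vim, 'UnregisterVM', vm_ref)  # synchronous, returns no task
    file_mgr = vim.service_content.fileManager
    delete_task = session.invoke_api(
        vim, 'DeleteDatastoreFile_Task', file_mgr,
        name='[datastore1] 85b58e95-04fd-45ff-ac60-d0167031e148',
        datacenter=datacenter_ref)
    session.wait_for_task(delete_task)
    # Only after the hypervisor cleanup does the manager deallocate the
    # instance's Neutron ports (the "deallocate_for_instance()" entry below).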
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1129.268100] env[69994]: DEBUG nova.network.neutron [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Successfully created port: 467676c7-4310-48fe-845b-cf6e75e26d14 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1129.300334] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.300334] env[69994]: DEBUG nova.compute.manager [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Instance network_info: |[{"id": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "address": "fa:16:3e:3a:fd:a2", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3723ea-e1", "ovs_interfaceid": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1129.300819] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:fd:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be3723ea-e18d-4908-bb9b-d8bbce5d3cee', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1129.309390] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1129.309949] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1129.310209] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4058a4d3-a05f-4e86-9314-9b65ef964a1b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.332296] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1129.332296] env[69994]: value = "task-2926229" [ 1129.332296] env[69994]: _type = "Task" [ 1129.332296] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.338541] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926229, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.479594] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "refresh_cache-f0b77732-aae1-4790-a2c7-75586e78eda6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.499421] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926226, 'name': ReconfigVM_Task, 'duration_secs': 0.311403} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.499701] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 29ea539a-d8f4-487b-b5e7-1f15534272f9/29ea539a-d8f4-487b-b5e7-1f15534272f9.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1129.500369] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f21e73fb-f01f-4fab-8071-7f30f35c24db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.508016] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1129.508016] env[69994]: value = "task-2926230" [ 1129.508016] env[69994]: _type = "Task" [ 1129.508016] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.528067] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926230, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.572572] env[69994]: DEBUG nova.scheduler.client.report [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1129.661613] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "refresh_cache-c7c17fab-71a4-44df-907e-f7b408f80236" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.661924] env[69994]: DEBUG nova.objects.instance [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lazy-loading 'migration_context' on Instance uuid c7c17fab-71a4-44df-907e-f7b408f80236 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1129.683520] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-749091bc-8fb9-446a-bbdb-4d96343ca5a4 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1129.683625] env[69994]: DEBUG oslo_vmware.api [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.302341} completed successfully. 
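Note (illustrative): the inventory reported to placement above encodes this node's schedulable capacity. Placement computes capacity per resource class as (total - reserved) * allocation_ratio, so for the values shown:

    VCPU:      (48     - 0)   * 4.0 = 192 schedulable vCPUs
    MEMORY_MB: (196590 - 512) * 1.0 = 196078 MB
    DISK_GB:   (400    - 0)   * 1.0 = 400 GB

max_unit still bounds any single allocation, e.g. at most 16 VCPU or 65530 MB of RAM per instance on this provider.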
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.683769] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e023b303-fa0d-4b31-8f91-906c326caf80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.685338] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1129.685529] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1129.685707] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1129.685921] env[69994]: INFO nova.compute.manager [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1129.686109] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1129.686299] env[69994]: DEBUG nova.compute.manager [-] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1129.686393] env[69994]: DEBUG nova.network.neutron [-] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1129.692664] env[69994]: DEBUG oslo_vmware.api [None req-749091bc-8fb9-446a-bbdb-4d96343ca5a4 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1129.692664] env[69994]: value = "task-2926231" [ 1129.692664] env[69994]: _type = "Task" [ 1129.692664] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.700138] env[69994]: DEBUG oslo_vmware.api [None req-749091bc-8fb9-446a-bbdb-4d96343ca5a4 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926231, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.841092] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926229, 'name': CreateVM_Task, 'duration_secs': 0.332803} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.841379] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1129.841963] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.842142] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.842454] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1129.842749] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2007326-0eeb-4bd8-b1f2-d9df9cb29a24 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.848400] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1129.848400] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f51654-dff6-b014-396c-ec2d000b7d64" [ 1129.848400] env[69994]: _type = "Task" [ 1129.848400] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.858795] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f51654-dff6-b014-396c-ec2d000b7d64, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.860277] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "3c374550-d65b-494a-89d7-60720f6b44dc" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.860531] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "3c374550-d65b-494a-89d7-60720f6b44dc" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.860688] env[69994]: INFO nova.compute.manager [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Shelving [ 1130.018029] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926230, 'name': Rename_Task, 'duration_secs': 0.147313} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.018635] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1130.018931] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3775167c-6115-4ab6-afa1-ad7dca1d0b17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.025115] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1130.025115] env[69994]: value = "task-2926232" [ 1130.025115] env[69994]: _type = "Task" [ 1130.025115] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.032979] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926232, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.075731] env[69994]: DEBUG nova.compute.manager [req-faf810ef-db34-462e-84a9-ac2829f15453 req-3ff74e35-855c-4fc3-a37c-68cb67a2944c service nova] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Received event network-vif-deleted-280e558a-fd50-4c1d-8524-0b37afc1e13e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1130.075731] env[69994]: INFO nova.compute.manager [req-faf810ef-db34-462e-84a9-ac2829f15453 req-3ff74e35-855c-4fc3-a37c-68cb67a2944c service nova] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Neutron deleted interface 280e558a-fd50-4c1d-8524-0b37afc1e13e; detaching it from the instance and deleting it from the info cache [ 1130.075731] env[69994]: DEBUG nova.network.neutron [req-faf810ef-db34-462e-84a9-ac2829f15453 req-3ff74e35-855c-4fc3-a37c-68cb67a2944c service nova] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.080678] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.869s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.106434] env[69994]: INFO nova.scheduler.client.report [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Deleted allocations for instance 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a [ 1130.164944] env[69994]: DEBUG nova.objects.base [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1130.165882] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbddefc3-de78-4164-81f5-463abdfe4e0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.187867] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b2d3ca9-c044-4378-8dbd-9fac5cc4f6d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.193762] env[69994]: DEBUG oslo_vmware.api [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1130.193762] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521c6c24-2154-903d-8093-9db08545c937" [ 1130.193762] env[69994]: _type = "Task" [ 1130.193762] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.209049] env[69994]: DEBUG oslo_vmware.api [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521c6c24-2154-903d-8093-9db08545c937, 'name': SearchDatastore_Task, 'duration_secs': 0.008913} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.209322] env[69994]: DEBUG oslo_vmware.api [None req-749091bc-8fb9-446a-bbdb-4d96343ca5a4 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926231, 'name': PowerOffVM_Task, 'duration_secs': 0.170325} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.209842] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.210125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.211629] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-749091bc-8fb9-446a-bbdb-4d96343ca5a4 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1130.211629] env[69994]: DEBUG nova.compute.manager [None req-749091bc-8fb9-446a-bbdb-4d96343ca5a4 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1130.212738] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b261358b-e177-4da4-8dcc-b70da0c24098 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.231583] env[69994]: DEBUG nova.compute.manager [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1130.257880] env[69994]: DEBUG nova.virt.hardware [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1130.258156] env[69994]: DEBUG nova.virt.hardware [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1130.258370] env[69994]: DEBUG nova.virt.hardware [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1130.258622] env[69994]: DEBUG nova.virt.hardware [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1130.258783] env[69994]: DEBUG nova.virt.hardware [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1130.259048] env[69994]: DEBUG nova.virt.hardware [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1130.259202] env[69994]: DEBUG nova.virt.hardware [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1130.259360] env[69994]: DEBUG nova.virt.hardware [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1130.259527] env[69994]: DEBUG nova.virt.hardware [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 
tempest-ServersTestJSON-1703962916-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1130.259689] env[69994]: DEBUG nova.virt.hardware [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1130.259860] env[69994]: DEBUG nova.virt.hardware [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1130.261023] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a37212-6317-4d45-bd69-ff7b08326155 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.269224] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17942ef-df6c-463e-9f2e-e93de3735eb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.357950] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f51654-dff6-b014-396c-ec2d000b7d64, 'name': SearchDatastore_Task, 'duration_secs': 0.012847} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.358303] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.358566] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1130.358810] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.358963] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.359151] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1130.359425] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b1f47ad-b558-4157-b303-dfaf070cfa84 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.369233] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1130.369423] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1130.370561] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8741c8d-9d00-42e7-8d1b-54c80600b068 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.375981] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1130.375981] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529ef129-7372-e63c-260b-d3525526af21" [ 1130.375981] env[69994]: _type = "Task" [ 1130.375981] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.385091] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529ef129-7372-e63c-260b-d3525526af21, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.488668] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1130.488975] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd00bf92-b002-4eb3-8816-2e9d51b83362 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.496496] env[69994]: DEBUG oslo_vmware.api [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1130.496496] env[69994]: value = "task-2926233" [ 1130.496496] env[69994]: _type = "Task" [ 1130.496496] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.504060] env[69994]: DEBUG oslo_vmware.api [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926233, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.534972] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926232, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.538426] env[69994]: DEBUG nova.network.neutron [-] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.578888] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-62a39ceb-c6a4-4461-844b-cf904b478677 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.589011] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a8d1834-84d1-43f4-acd1-461732876267 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.629260] env[69994]: DEBUG nova.compute.manager [req-faf810ef-db34-462e-84a9-ac2829f15453 req-3ff74e35-855c-4fc3-a37c-68cb67a2944c service nova] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Detach interface failed, port_id=280e558a-fd50-4c1d-8524-0b37afc1e13e, reason: Instance 85b58e95-04fd-45ff-ac60-d0167031e148 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1130.629974] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1fd82f2-7032-4d03-9e11-09ab860353b2 tempest-InstanceActionsTestJSON-978291315 tempest-InstanceActionsTestJSON-978291315-project-member] Lock "83a6beb7-5e26-4d90-87c3-28e4f8f1e34a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.327s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.726338] env[69994]: DEBUG oslo_concurrency.lockutils [None req-749091bc-8fb9-446a-bbdb-4d96343ca5a4 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.060s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.874582] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1130.875031] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d11fd897-bc8e-45cc-877a-d11fda2151d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.890942] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1130.890942] env[69994]: value = "task-2926234" [ 1130.890942] env[69994]: _type = "Task" [ 1130.890942] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.895540] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529ef129-7372-e63c-260b-d3525526af21, 'name': SearchDatastore_Task, 'duration_secs': 0.010927} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.899670] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a694bb9c-4b6a-402e-8761-ef1eb3aa6e64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.909705] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926234, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.911163] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1130.911163] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524f1106-63ac-3eca-18f7-f85cbeae1c5f" [ 1130.911163] env[69994]: _type = "Task" [ 1130.911163] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.919406] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524f1106-63ac-3eca-18f7-f85cbeae1c5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.979994] env[69994]: DEBUG nova.network.neutron [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Successfully updated port: 467676c7-4310-48fe-845b-cf6e75e26d14 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1130.990846] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7ad5c4-5dca-4ca8-b79d-ee79cc2f3ef9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.004074] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c003d2-1d88-4eae-9707-e69dbfbdbbc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.011761] env[69994]: DEBUG oslo_vmware.api [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926233, 'name': PowerOnVM_Task, 'duration_secs': 0.404286} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.012474] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1131.012762] env[69994]: DEBUG nova.compute.manager [None req-26e97ee5-cf56-4c1e-aa28-c851d8b72b8e tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1131.013674] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a429f184-f418-43b5-a4f4-1c4845ebdda3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.041996] env[69994]: INFO nova.compute.manager [-] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Took 1.36 seconds to deallocate network for instance. 
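The entries above repeatedly show the same pattern: a vCenter task (CreateVM_Task, PowerOnVM_Task, SearchDatastore_Task, ...) is submitted, then wait_for_task/_poll_task report progress until the task completes. As a reading aid only, here is a minimal, hypothetical sketch of such a poll loop; poll_fn and the task-record shape are assumptions for illustration, not oslo.vmware's actual internals.

    import time

    def wait_for_task(poll_fn, interval=0.5, timeout=300):
        """Poll a vCenter-style task until it succeeds or fails.

        poll_fn is assumed to return a dict like
        {'state': 'running'|'success'|'error', 'progress': int},
        mirroring the Task records seen in the log above (hypothetical shape).
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            task = poll_fn()
            if task['state'] == 'success':
                return task
            if task['state'] == 'error':
                raise RuntimeError('task failed: %r' % (task,))
            # progress would be logged here, then we sleep before the next poll
            time.sleep(interval)
        raise TimeoutError('task did not complete within %ss' % timeout)

This is only a sketch of the polling behaviour the log lines trace; the real driver also handles session re-login and task cancellation.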
[ 1131.048301] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f332065-966c-497a-9b8a-991a99adf915 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.062583] env[69994]: DEBUG oslo_vmware.api [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926232, 'name': PowerOnVM_Task, 'duration_secs': 0.642328} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.068017] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1131.068017] env[69994]: INFO nova.compute.manager [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Took 10.23 seconds to spawn the instance on the hypervisor. [ 1131.068017] env[69994]: DEBUG nova.compute.manager [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1131.068017] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9a3305-da07-4d66-9f4d-503a386fcbe9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.070200] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7240d13b-5694-45a3-9eeb-67e91a3799ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.090098] env[69994]: DEBUG nova.compute.provider_tree [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1131.406592] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926234, 'name': PowerOffVM_Task, 'duration_secs': 0.355807} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.406889] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1131.407719] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c3fec9-52d4-4805-8ff0-82634b17edbc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.428825] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3399b351-3ba4-4c73-b5bb-be7c6a57c30b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.434679] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524f1106-63ac-3eca-18f7-f85cbeae1c5f, 'name': SearchDatastore_Task, 'duration_secs': 0.010962} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.435312] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.435610] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 87c5b8e4-166c-44b9-a179-1afaef751434/87c5b8e4-166c-44b9-a179-1afaef751434.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1131.435862] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c14c3302-bb43-4500-9697-6d9fb4fac0ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.445759] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1131.445759] env[69994]: value = "task-2926235" [ 1131.445759] env[69994]: _type = "Task" [ 1131.445759] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.454308] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926235, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.486242] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "refresh_cache-f98078e1-ee72-4bdb-aebf-405ffbb7900d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.486389] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "refresh_cache-f98078e1-ee72-4bdb-aebf-405ffbb7900d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.486538] env[69994]: DEBUG nova.network.neutron [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1131.559403] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.605118] env[69994]: DEBUG nova.scheduler.client.report [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1131.609198] env[69994]: INFO nova.compute.manager [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Took 29.16 seconds to build instance. 
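The scheduler report entry above logs the provider inventory (VCPU, MEMORY_MB, DISK_GB with total, reserved and allocation_ratio). As an illustration of what those numbers imply, the sketch below computes the effective capacity Placement derives from such an inventory, using the standard (total - reserved) * allocation_ratio rule; effective_capacity is a hypothetical helper name, the values are taken from the log line.

    # Inventory as logged for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Capacity available for allocations: (total - reserved) * allocation_ratio
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

So with a 4.0 CPU allocation ratio this host can back up to 192 vCPUs of instances, which is why the 1-vCPU m1.nano builds in this run never hit a VCPU limit.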
[ 1131.717220] env[69994]: DEBUG nova.objects.instance [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'flavor' on Instance uuid 68eba44a-0989-47dc-a88b-102d9aa34c5d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1131.946217] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1131.946217] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-74a55779-f4f6-40ff-a9f1-b36a99099987 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.958315] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926235, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482155} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.959753] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 87c5b8e4-166c-44b9-a179-1afaef751434/87c5b8e4-166c-44b9-a179-1afaef751434.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1131.960145] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1131.960629] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1131.960629] env[69994]: value = "task-2926236" [ 1131.960629] env[69994]: _type = "Task" [ 1131.960629] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.961029] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0692bbe7-3cc6-4bbe-8cfe-1af05f2f3aa8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.973937] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1131.973937] env[69994]: value = "task-2926237" [ 1131.973937] env[69994]: _type = "Task" [ 1131.973937] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.982368] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926237, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.052936] env[69994]: DEBUG nova.network.neutron [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1132.117644] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cef938cf-8bd6-490c-824c-72e423ad7f53 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.683s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.212223] env[69994]: DEBUG nova.compute.manager [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Received event network-vif-plugged-467676c7-4310-48fe-845b-cf6e75e26d14 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1132.212512] env[69994]: DEBUG oslo_concurrency.lockutils [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] Acquiring lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.212753] env[69994]: DEBUG oslo_concurrency.lockutils [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] Lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.212939] env[69994]: DEBUG oslo_concurrency.lockutils [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] Lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.213122] env[69994]: DEBUG nova.compute.manager [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] No waiting events found dispatching network-vif-plugged-467676c7-4310-48fe-845b-cf6e75e26d14 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1132.213303] env[69994]: WARNING nova.compute.manager [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Received unexpected event network-vif-plugged-467676c7-4310-48fe-845b-cf6e75e26d14 
for instance with vm_state building and task_state spawning. [ 1132.214354] env[69994]: DEBUG nova.compute.manager [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Received event network-changed-467676c7-4310-48fe-845b-cf6e75e26d14 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1132.214354] env[69994]: DEBUG nova.compute.manager [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Refreshing instance network info cache due to event network-changed-467676c7-4310-48fe-845b-cf6e75e26d14. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1132.214354] env[69994]: DEBUG oslo_concurrency.lockutils [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] Acquiring lock "refresh_cache-f98078e1-ee72-4bdb-aebf-405ffbb7900d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.230944] env[69994]: DEBUG oslo_concurrency.lockutils [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.230944] env[69994]: DEBUG oslo_concurrency.lockutils [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquired lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.230944] env[69994]: DEBUG nova.network.neutron [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1132.230944] env[69994]: DEBUG nova.objects.instance [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'info_cache' on Instance uuid 68eba44a-0989-47dc-a88b-102d9aa34c5d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1132.247786] env[69994]: DEBUG nova.network.neutron [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Updating instance_info_cache with network_info: [{"id": "467676c7-4310-48fe-845b-cf6e75e26d14", "address": "fa:16:3e:3a:9d:0e", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap467676c7-43", "ovs_interfaceid": "467676c7-4310-48fe-845b-cf6e75e26d14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.473328] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926236, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.483459] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926237, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076905} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.484102] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1132.485511] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0a5218-11ac-425b-ac88-2290de2c59e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.513449] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 87c5b8e4-166c-44b9-a179-1afaef751434/87c5b8e4-166c-44b9-a179-1afaef751434.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1132.514008] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ff23979-3ec4-4c76-a168-60807b87cfd9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.536113] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1132.536113] env[69994]: value = "task-2926238" [ 1132.536113] env[69994]: _type = "Task" [ 1132.536113] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.551849] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926238, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.612254] env[69994]: DEBUG nova.compute.manager [req-464a2544-f15e-437f-bb0d-302edd4ead38 req-840317a3-7726-429a-b330-eee0e46cb485 service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Received event network-changed-8c872b8c-ae3b-4523-a7c5-2a3ed82baac3 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1132.612560] env[69994]: DEBUG nova.compute.manager [req-464a2544-f15e-437f-bb0d-302edd4ead38 req-840317a3-7726-429a-b330-eee0e46cb485 service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Refreshing instance network info cache due to event network-changed-8c872b8c-ae3b-4523-a7c5-2a3ed82baac3. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1132.613202] env[69994]: DEBUG oslo_concurrency.lockutils [req-464a2544-f15e-437f-bb0d-302edd4ead38 req-840317a3-7726-429a-b330-eee0e46cb485 service nova] Acquiring lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.613202] env[69994]: DEBUG oslo_concurrency.lockutils [req-464a2544-f15e-437f-bb0d-302edd4ead38 req-840317a3-7726-429a-b330-eee0e46cb485 service nova] Acquired lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.613202] env[69994]: DEBUG nova.network.neutron [req-464a2544-f15e-437f-bb0d-302edd4ead38 req-840317a3-7726-429a-b330-eee0e46cb485 service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Refreshing network info cache for port 8c872b8c-ae3b-4523-a7c5-2a3ed82baac3 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1132.620389] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.410s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.622872] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.064s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.623225] env[69994]: DEBUG nova.objects.instance [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lazy-loading 'resources' on Instance uuid 85b58e95-04fd-45ff-ac60-d0167031e148 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1132.732268] env[69994]: DEBUG nova.objects.base [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 
tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Object Instance<68eba44a-0989-47dc-a88b-102d9aa34c5d> lazy-loaded attributes: flavor,info_cache {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1132.756648] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "refresh_cache-f98078e1-ee72-4bdb-aebf-405ffbb7900d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.756996] env[69994]: DEBUG nova.compute.manager [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Instance network_info: |[{"id": "467676c7-4310-48fe-845b-cf6e75e26d14", "address": "fa:16:3e:3a:9d:0e", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap467676c7-43", "ovs_interfaceid": "467676c7-4310-48fe-845b-cf6e75e26d14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1132.757331] env[69994]: DEBUG oslo_concurrency.lockutils [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] Acquired lock "refresh_cache-f98078e1-ee72-4bdb-aebf-405ffbb7900d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.757504] env[69994]: DEBUG nova.network.neutron [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Refreshing network info cache for port 467676c7-4310-48fe-845b-cf6e75e26d14 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1132.758803] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:9d:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '467676c7-4310-48fe-845b-cf6e75e26d14', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1132.769960] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall 
[None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1132.773404] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1132.775824] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1a1710d-ce9d-4fd5-8d04-ceda7b88f214 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.797476] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1132.797476] env[69994]: value = "task-2926239" [ 1132.797476] env[69994]: _type = "Task" [ 1132.797476] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.806642] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926239, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.906090] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "f0b77732-aae1-4790-a2c7-75586e78eda6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.906318] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.906526] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "f0b77732-aae1-4790-a2c7-75586e78eda6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.906713] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.906897] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 
0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.909257] env[69994]: INFO nova.compute.manager [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Terminating instance [ 1132.974935] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926236, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.049313] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926238, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.121163] env[69994]: DEBUG nova.network.neutron [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Updated VIF entry in instance network info cache for port 467676c7-4310-48fe-845b-cf6e75e26d14. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1133.121163] env[69994]: DEBUG nova.network.neutron [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Updating instance_info_cache with network_info: [{"id": "467676c7-4310-48fe-845b-cf6e75e26d14", "address": "fa:16:3e:3a:9d:0e", "network": {"id": "07bd7cf1-fd13-4a08-894f-ca984630ac5e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1775304188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbe936f4d284e73999846251269fefd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap467676c7-43", "ovs_interfaceid": "467676c7-4310-48fe-845b-cf6e75e26d14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.210027] env[69994]: INFO nova.scheduler.client.report [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted allocation for migration 1f04407a-5664-4068-88a4-f8523ad869fc [ 1133.310750] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926239, 'name': CreateVM_Task, 'duration_secs': 0.484052} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.310935] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1133.311773] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.311947] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.312297] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1133.312941] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d09b5779-b16f-4974-9cc3-e2a82907cbe8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.323093] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1133.323093] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ba8f86-5910-899e-7d98-1a0bdd153f0d" [ 1133.323093] env[69994]: _type = "Task" [ 1133.323093] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.335793] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ba8f86-5910-899e-7d98-1a0bdd153f0d, 'name': SearchDatastore_Task, 'duration_secs': 0.012216} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.336212] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.336349] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1133.336585] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.336729] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.336904] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1133.337484] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c66cc81-d067-474c-9bdf-045545418a88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.352107] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1133.352331] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1133.353785] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e542856c-0871-4448-bf6c-931ee61b6243 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.363343] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1133.363343] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b834d1-f397-5de9-1ca4-a8bf1b9516f9" [ 1133.363343] env[69994]: _type = "Task" [ 1133.363343] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.372039] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b834d1-f397-5de9-1ca4-a8bf1b9516f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.411166] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afd828b-7a25-46ce-a729-01e660b7575d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.414755] env[69994]: DEBUG nova.compute.manager [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1133.415024] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1133.415902] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38971d7-4369-4985-a448-db57c1f9841e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.424836] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab0e63e-70e2-47cc-94c0-8539608d8a3f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.431502] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1133.432145] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47c6d56f-1dbb-4c99-959e-2884fd8d0d81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.464120] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df7e891-886e-494b-b2d2-d1e9f070d595 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.466846] env[69994]: DEBUG oslo_vmware.api [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1133.466846] env[69994]: value = "task-2926240" [ 1133.466846] env[69994]: _type = "Task" [ 1133.466846] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.482663] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa46e333-d206-4eb3-b1e1-f40e26f5fdb6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.493514] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926236, 'name': CreateSnapshot_Task, 'duration_secs': 1.18785} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.493514] env[69994]: DEBUG oslo_vmware.api [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926240, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.493887] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1133.495099] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb62879-5500-42cc-9616-7e6dfae41549 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.505851] env[69994]: DEBUG nova.compute.provider_tree [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.546771] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926238, 'name': ReconfigVM_Task, 'duration_secs': 0.55424} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.546835] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 87c5b8e4-166c-44b9-a179-1afaef751434/87c5b8e4-166c-44b9-a179-1afaef751434.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1133.547522] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8257b6a1-8415-48f5-a0d5-b6bb73786244 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.554473] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1133.554473] env[69994]: value = "task-2926241" [ 1133.554473] env[69994]: _type = "Task" [ 1133.554473] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.562948] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926241, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.577889] env[69994]: DEBUG nova.network.neutron [req-464a2544-f15e-437f-bb0d-302edd4ead38 req-840317a3-7726-429a-b330-eee0e46cb485 service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updated VIF entry in instance network info cache for port 8c872b8c-ae3b-4523-a7c5-2a3ed82baac3. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1133.578267] env[69994]: DEBUG nova.network.neutron [req-464a2544-f15e-437f-bb0d-302edd4ead38 req-840317a3-7726-429a-b330-eee0e46cb485 service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance_info_cache with network_info: [{"id": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "address": "fa:16:3e:8a:06:07", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c872b8c-ae", "ovs_interfaceid": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.623904] env[69994]: DEBUG oslo_concurrency.lockutils [req-13ed6efb-6aef-47db-99d7-b8e7ccc75e5a req-adcd135d-dbeb-4a7e-8ed1-dd13979d7e04 service nova] Releasing lock "refresh_cache-f98078e1-ee72-4bdb-aebf-405ffbb7900d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.685588] env[69994]: DEBUG nova.network.neutron [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Updating instance_info_cache with network_info: [{"id": "0c8c3a9b-a328-44f8-81e2-5a480901ac9f", "address": "fa:16:3e:93:51:57", "network": {"id": "75f691f8-2853-4a39-bfdb-081341871a53", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1828741811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e63c47302d14d849b239a91580a25ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c8c3a9b-a3", "ovs_interfaceid": "0c8c3a9b-a328-44f8-81e2-5a480901ac9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.713242] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37450b4c-2fe2-4e47-801f-10cf9ebedd78 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "c7c17fab-71a4-44df-907e-f7b408f80236" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.547s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.832915] env[69994]: DEBUG oslo_concurrency.lockutils [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Acquiring lock "489b68f2-c2f2-4710-a06f-45ad8c577441" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.833270] env[69994]: DEBUG oslo_concurrency.lockutils [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Lock "489b68f2-c2f2-4710-a06f-45ad8c577441" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.833525] env[69994]: DEBUG oslo_concurrency.lockutils [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Acquiring lock "489b68f2-c2f2-4710-a06f-45ad8c577441-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.834071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Lock "489b68f2-c2f2-4710-a06f-45ad8c577441-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.834071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Lock "489b68f2-c2f2-4710-a06f-45ad8c577441-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.836665] env[69994]: INFO nova.compute.manager [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Terminating instance [ 1133.874064] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b834d1-f397-5de9-1ca4-a8bf1b9516f9, 'name': SearchDatastore_Task, 'duration_secs': 0.009103} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.874877] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-871b7c06-3cd0-4efc-bc78-58c3a50c9466 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.880167] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1133.880167] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5212328f-1fb5-7937-c964-45cfaf7dd61b" [ 1133.880167] env[69994]: _type = "Task" [ 1133.880167] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.887716] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5212328f-1fb5-7937-c964-45cfaf7dd61b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.979296] env[69994]: DEBUG oslo_vmware.api [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926240, 'name': PowerOffVM_Task, 'duration_secs': 0.211517} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.979632] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1133.979773] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1133.980044] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d39ac9b-7a5c-44ab-add1-533bed06978a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.009877] env[69994]: DEBUG nova.scheduler.client.report [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1134.021950] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None 
req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1134.021950] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1c9f6e9f-a55b-4ce0-9836-bdd44e24cb3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.030743] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1134.030743] env[69994]: value = "task-2926243" [ 1134.030743] env[69994]: _type = "Task" [ 1134.030743] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.039312] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926243, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.046941] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1134.047178] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1134.047360] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleting the datastore file [datastore1] f0b77732-aae1-4790-a2c7-75586e78eda6 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1134.048027] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5a3de5c-f337-4249-81d5-7fc4d24bc7dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.053508] env[69994]: DEBUG oslo_vmware.api [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1134.053508] env[69994]: value = "task-2926244" [ 1134.053508] env[69994]: _type = "Task" [ 1134.053508] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.063742] env[69994]: DEBUG oslo_vmware.api [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926244, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.066875] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926241, 'name': Rename_Task, 'duration_secs': 0.172219} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.067217] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1134.067366] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d9b9b88-a91c-4490-a1b3-51a6e3354c94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.073024] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1134.073024] env[69994]: value = "task-2926245" [ 1134.073024] env[69994]: _type = "Task" [ 1134.073024] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.079957] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926245, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.081509] env[69994]: DEBUG oslo_concurrency.lockutils [req-464a2544-f15e-437f-bb0d-302edd4ead38 req-840317a3-7726-429a-b330-eee0e46cb485 service nova] Releasing lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.188806] env[69994]: DEBUG oslo_concurrency.lockutils [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Releasing lock "refresh_cache-68eba44a-0989-47dc-a88b-102d9aa34c5d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.343806] env[69994]: DEBUG nova.compute.manager [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1134.343806] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1134.344290] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cb0ce8-a14d-4dc5-be28-b65bb3ef34e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.352565] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1134.353111] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88f64466-3489-4b13-a9ec-f9373b571f94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.359307] env[69994]: DEBUG oslo_vmware.api [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Waiting for the task: (returnval){ [ 1134.359307] env[69994]: value = "task-2926246" [ 1134.359307] env[69994]: _type = "Task" [ 1134.359307] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.370327] env[69994]: DEBUG oslo_vmware.api [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926246, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.391533] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5212328f-1fb5-7937-c964-45cfaf7dd61b, 'name': SearchDatastore_Task, 'duration_secs': 0.008957} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.393281] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.393691] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] f98078e1-ee72-4bdb-aebf-405ffbb7900d/f98078e1-ee72-4bdb-aebf-405ffbb7900d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1134.396879] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44f6298a-cc6b-4276-b7c8-beb0594ff7b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.400790] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Acquiring lock "e53a4875-77e6-4a13-9a4e-004fe8014a85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.401082] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Lock "e53a4875-77e6-4a13-9a4e-004fe8014a85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.408673] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1134.408673] env[69994]: value = "task-2926247" [ 1134.408673] env[69994]: _type = "Task" [ 1134.408673] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.419188] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926247, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.516647] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.893s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.544174] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926243, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.545458] env[69994]: INFO nova.scheduler.client.report [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleted allocations for instance 85b58e95-04fd-45ff-ac60-d0167031e148 [ 1134.571081] env[69994]: DEBUG oslo_vmware.api [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926244, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149503} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.572029] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1134.572029] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1134.572029] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1134.572029] env[69994]: INFO nova.compute.manager [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1134.572466] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1134.573213] env[69994]: DEBUG nova.compute.manager [-] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1134.573213] env[69994]: DEBUG nova.network.neutron [-] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1134.586686] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926245, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.774740] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "c7c17fab-71a4-44df-907e-f7b408f80236" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.775185] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "c7c17fab-71a4-44df-907e-f7b408f80236" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.775474] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "c7c17fab-71a4-44df-907e-f7b408f80236-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.775705] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "c7c17fab-71a4-44df-907e-f7b408f80236-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.775936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "c7c17fab-71a4-44df-907e-f7b408f80236-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.779914] env[69994]: INFO nova.compute.manager [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Terminating instance [ 1134.875840] env[69994]: 
DEBUG oslo_vmware.api [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926246, 'name': PowerOffVM_Task, 'duration_secs': 0.204303} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.876136] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1134.876312] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1134.876572] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc07a3a3-2c2a-4112-b332-7527b9ec30bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.903926] env[69994]: DEBUG nova.compute.manager [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1134.924043] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926247, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449846} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.924043] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] f98078e1-ee72-4bdb-aebf-405ffbb7900d/f98078e1-ee72-4bdb-aebf-405ffbb7900d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1134.924043] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1134.924043] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73c3d98d-424b-423a-931c-6a71e449219b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.931346] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1134.931346] env[69994]: value = "task-2926249" [ 1134.931346] env[69994]: _type = "Task" [ 1134.931346] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.939495] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926249, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.943556] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1134.943940] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1134.944029] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Deleting the datastore file [datastore2] 489b68f2-c2f2-4710-a06f-45ad8c577441 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1134.944241] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-563f6e77-d0b7-42d2-99e5-5d9db9be2029 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.950881] env[69994]: DEBUG oslo_vmware.api [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Waiting for the task: (returnval){ [ 1134.950881] env[69994]: value = "task-2926250" [ 1134.950881] env[69994]: _type = "Task" [ 1134.950881] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.962388] env[69994]: DEBUG oslo_vmware.api [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926250, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.043901] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926243, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.053344] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed32a386-32d0-4526-b285-269ca669a7ae tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "85b58e95-04fd-45ff-ac60-d0167031e148" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.475s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.066534] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "85293c91-f363-4085-9eb8-2bf6514fa2f1" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.067261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.067261] env[69994]: INFO nova.compute.manager [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Shelving [ 1135.087034] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926245, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.200856] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1135.201196] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a0807e9-2915-460a-9352-2822e5c357fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.208673] env[69994]: DEBUG oslo_vmware.api [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1135.208673] env[69994]: value = "task-2926251" [ 1135.208673] env[69994]: _type = "Task" [ 1135.208673] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.217743] env[69994]: DEBUG oslo_vmware.api [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926251, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.287721] env[69994]: DEBUG nova.compute.manager [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1135.287965] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1135.288974] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21dd001d-c2b8-4d5a-ad21-49d079c1b0c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.296610] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1135.296990] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c59195d-cd64-44ab-b9ab-2187a4c8fc73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.303479] env[69994]: DEBUG oslo_vmware.api [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1135.303479] env[69994]: value = "task-2926252" [ 1135.303479] env[69994]: _type = "Task" [ 1135.303479] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.312336] env[69994]: DEBUG oslo_vmware.api [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926252, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.333131] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "826489f7-081d-4a3e-8a05-62d902849a61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.333581] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "826489f7-081d-4a3e-8a05-62d902849a61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.384504] env[69994]: DEBUG nova.compute.manager [req-c9e79929-0cdc-445c-b45d-916cf1e85e34 req-29526e8b-3982-4d50-99b0-980122e23fed service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Received event network-vif-deleted-0f0159af-9c04-46fe-8fac-ebd620726fd7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1135.384745] env[69994]: INFO nova.compute.manager [req-c9e79929-0cdc-445c-b45d-916cf1e85e34 req-29526e8b-3982-4d50-99b0-980122e23fed service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Neutron deleted interface 0f0159af-9c04-46fe-8fac-ebd620726fd7; detaching it from the instance and deleting it from the info cache [ 1135.385094] env[69994]: DEBUG nova.network.neutron [req-c9e79929-0cdc-445c-b45d-916cf1e85e34 req-29526e8b-3982-4d50-99b0-980122e23fed service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.428071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.428510] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.430189] env[69994]: INFO nova.compute.claims [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1135.442124] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926249, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073356} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.442423] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1135.443295] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4931b673-3bf2-4cbf-bbd3-846b3ffacbf2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.470024] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] f98078e1-ee72-4bdb-aebf-405ffbb7900d/f98078e1-ee72-4bdb-aebf-405ffbb7900d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1135.471410] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07bf8c02-fa83-4bbe-b45d-c8203886c1b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.494431] env[69994]: DEBUG oslo_vmware.api [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Task: {'id': task-2926250, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149936} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.495920] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1135.496136] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1135.496477] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1135.496561] env[69994]: INFO nova.compute.manager [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Took 1.15 seconds to destroy the instance on the hypervisor. 
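The entries above trace the vmwareapi destroy sequence: power off, UnregisterVM, DeleteDatastoreFile_Task, wait for the task, then report the instance destroyed. A minimal, hedged sketch of that sequence against an oslo.vmware session follows; `session`, `vm_ref`, `ds_path` and `datacenter_ref` are assumed inputs, not the exact Nova helpers.

    from oslo_vmware import exceptions as vexc

    def destroy_on_datastore(session, vm_ref, ds_path, datacenter_ref):
        # UnregisterVM removes the VM from the vCenter inventory without
        # touching its files on the datastore.
        session.invoke_api(session.vim, "UnregisterVM", vm_ref)

        # DeleteDatastoreFile_Task removes the instance directory; it returns
        # a Task moref that wait_for_task() polls until it succeeds or fails.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                                  file_manager, name=ds_path,
                                  datacenter=datacenter_ref)
        try:
            session.wait_for_task(task)
        except vexc.FileNotFoundException:
            # Already gone; treat as cleaned up.
            pass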
[ 1135.496753] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1135.497069] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1135.497069] env[69994]: value = "task-2926253" [ 1135.497069] env[69994]: _type = "Task" [ 1135.497069] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.497280] env[69994]: DEBUG nova.compute.manager [-] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1135.497378] env[69994]: DEBUG nova.network.neutron [-] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1135.511130] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926253, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.546412] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926243, 'name': CloneVM_Task, 'duration_secs': 1.361357} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.546641] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Created linked-clone VM from snapshot [ 1135.547434] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3252d69e-38d1-4ec7-ad0a-66fe4bc55d1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.554897] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Uploading image 80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1135.583996] env[69994]: DEBUG oslo_vmware.rw_handles [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1135.583996] env[69994]: value = "vm-587628" [ 1135.583996] env[69994]: _type = "VirtualMachine" [ 1135.583996] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1135.584615] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-728156db-e241-455e-b101-ee9bd4d534c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.592166] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926245, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.593542] env[69994]: DEBUG oslo_vmware.rw_handles [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lease: (returnval){ [ 1135.593542] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e9c658-f3cb-b249-1509-0b7bbac9828e" [ 1135.593542] env[69994]: _type = "HttpNfcLease" [ 1135.593542] env[69994]: } obtained for exporting VM: (result){ [ 1135.593542] env[69994]: value = "vm-587628" [ 1135.593542] env[69994]: _type = "VirtualMachine" [ 1135.593542] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1135.594068] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the lease: (returnval){ [ 1135.594068] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e9c658-f3cb-b249-1509-0b7bbac9828e" [ 1135.594068] env[69994]: _type = "HttpNfcLease" [ 1135.594068] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1135.600944] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1135.600944] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e9c658-f3cb-b249-1509-0b7bbac9828e" [ 1135.600944] env[69994]: _type = "HttpNfcLease" [ 1135.600944] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1135.719194] env[69994]: DEBUG oslo_vmware.api [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926251, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.802792] env[69994]: DEBUG nova.network.neutron [-] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.818275] env[69994]: DEBUG oslo_vmware.api [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926252, 'name': PowerOffVM_Task, 'duration_secs': 0.210727} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.818566] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1135.818836] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1135.819230] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6877f60-ea66-4856-82f3-1e806d63fafe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.835778] env[69994]: DEBUG nova.compute.manager [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1135.889802] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd229ddf-621a-4da8-b3be-01fea64097b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.893157] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1135.893703] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1135.893703] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleting the datastore file [datastore2] c7c17fab-71a4-44df-907e-f7b408f80236 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1135.894398] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1546231d-e5d9-4655-8e78-07546df1f91c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.904290] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b316dd3b-1865-41d1-ace7-203b7049350c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.920526] env[69994]: DEBUG oslo_vmware.api [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 
tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1135.920526] env[69994]: value = "task-2926256" [ 1135.920526] env[69994]: _type = "Task" [ 1135.920526] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.929918] env[69994]: DEBUG oslo_vmware.api [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926256, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.949245] env[69994]: DEBUG nova.compute.manager [req-c9e79929-0cdc-445c-b45d-916cf1e85e34 req-29526e8b-3982-4d50-99b0-980122e23fed service nova] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Detach interface failed, port_id=0f0159af-9c04-46fe-8fac-ebd620726fd7, reason: Instance f0b77732-aae1-4790-a2c7-75586e78eda6 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1136.011126] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926253, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.078398] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1136.081032] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c31c712b-bbcc-4d7b-be56-fd278ca365c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.094196] env[69994]: DEBUG oslo_vmware.api [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926245, 'name': PowerOnVM_Task, 'duration_secs': 1.647879} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.096020] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1136.096020] env[69994]: value = "task-2926257" [ 1136.096020] env[69994]: _type = "Task" [ 1136.096020] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.096639] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1136.097022] env[69994]: INFO nova.compute.manager [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Took 10.55 seconds to spawn the instance on the hypervisor. [ 1136.097373] env[69994]: DEBUG nova.compute.manager [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1136.111026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089ca9b2-5f50-4393-b667-87b24e2bc3e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.119655] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "6b29cefb-8f86-4826-a1c9-873fd48c53a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.121068] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "6b29cefb-8f86-4826-a1c9-873fd48c53a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.126864] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1136.126864] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e9c658-f3cb-b249-1509-0b7bbac9828e" [ 1136.126864] env[69994]: _type = "HttpNfcLease" [ 1136.126864] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1136.132156] env[69994]: DEBUG oslo_vmware.rw_handles [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1136.132156] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e9c658-f3cb-b249-1509-0b7bbac9828e" [ 1136.132156] env[69994]: _type = "HttpNfcLease" [ 1136.132156] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1136.135598] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926257, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.136524] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a022c9-b21f-43bd-8bb7-490ded831696 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.146230] env[69994]: DEBUG oslo_vmware.rw_handles [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52282bb2-0059-82f0-3a9c-5f88710a6831/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1136.146579] env[69994]: DEBUG oslo_vmware.rw_handles [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52282bb2-0059-82f0-3a9c-5f88710a6831/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1136.226323] env[69994]: DEBUG oslo_vmware.api [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926251, 'name': PowerOnVM_Task, 'duration_secs': 0.688104} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.226609] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1136.226818] env[69994]: DEBUG nova.compute.manager [None req-de1e680f-6552-4cb7-a4b6-84d586c8cb19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1136.227713] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793ff997-8e86-4ed7-a76f-52f69a35e85c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.311016] env[69994]: INFO nova.compute.manager [-] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Took 1.74 seconds to deallocate network for instance. 
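The PowerOnVM_Task entries above (progress polling followed by "Powered on the VM") reduce to a two-step pattern: invoke the task method, then let oslo.vmware poll it to completion. A minimal sketch, assuming an authenticated VMwareAPISession (`session`) and a VM moref (`vm_ref`):

    def power_on_vm(session, vm_ref):
        # wait_for_task() is what produces the "_poll_task ... progress is N%"
        # lines; it raises on task error and returns the final TaskInfo.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        return session.wait_for_task(task)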
[ 1136.336183] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f718a11e-dbd6-4203-947a-56d8ecc7430a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.373836] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.431936] env[69994]: DEBUG oslo_vmware.api [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926256, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174277} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.432225] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1136.432414] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1136.432597] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1136.432796] env[69994]: INFO nova.compute.manager [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1136.433075] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1136.433284] env[69994]: DEBUG nova.compute.manager [-] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1136.433383] env[69994]: DEBUG nova.network.neutron [-] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1136.509369] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926253, 'name': ReconfigVM_Task, 'duration_secs': 0.636636} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.509646] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Reconfigured VM instance instance-00000069 to attach disk [datastore1] f98078e1-ee72-4bdb-aebf-405ffbb7900d/f98078e1-ee72-4bdb-aebf-405ffbb7900d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1136.510686] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d88b9f0-bf2e-428e-9327-49ebb13ec3d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.518298] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1136.518298] env[69994]: value = "task-2926258" [ 1136.518298] env[69994]: _type = "Task" [ 1136.518298] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.528767] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926258, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.614745] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926257, 'name': PowerOffVM_Task, 'duration_secs': 0.344584} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.614745] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1136.615325] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04fa8b2-68cb-40f7-8127-091dd0a6a122 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.624715] env[69994]: DEBUG nova.compute.manager [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1136.658775] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83df83c7-e319-4c18-9099-ee36fcbc8a2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.665422] env[69994]: INFO nova.compute.manager [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Took 18.49 seconds to build instance. [ 1136.819179] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.851484] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ba4cee-c45c-4ffa-afb4-7b64e4b6d026 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.857319] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7d3b62-85ac-460a-b1d3-45cfa70c9ee7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.888671] env[69994]: DEBUG nova.network.neutron [-] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.891403] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711d75d3-f75a-455b-a8fd-b3aff338ea85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.900289] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416d1729-a2cb-4285-bff6-fb35a1b40f11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.915577] env[69994]: DEBUG nova.compute.provider_tree [None req-1d165d69-304f-4119-a7c5-d4af25142711 
tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.029143] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926258, 'name': Rename_Task, 'duration_secs': 0.207322} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.029544] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1137.029935] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7eb64fc-db7c-4785-aeeb-1d9aaf83cb85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.039025] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1137.039025] env[69994]: value = "task-2926259" [ 1137.039025] env[69994]: _type = "Task" [ 1137.039025] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.049826] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926259, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.168041] env[69994]: DEBUG oslo_concurrency.lockutils [None req-460935a2-cc2d-4896-a7cb-9a18396cec32 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "87c5b8e4-166c-44b9-a179-1afaef751434" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.008s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.179417] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1137.180893] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.181255] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b451d027-d623-4982-8ce6-647566857ac7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.192345] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1137.192345] env[69994]: value = "task-2926260" [ 1137.192345] env[69994]: _type = "Task" [ 1137.192345] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.211772] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926260, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.303270] env[69994]: DEBUG nova.network.neutron [-] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.391918] env[69994]: INFO nova.compute.manager [-] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Took 1.89 seconds to deallocate network for instance. 
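The shelve path above (CreateSnapshot_Task followed by "Creating linked-clone VM from snapshot" and CloneVM_Task) can be sketched as below. This is an illustrative simplification, not Nova's actual code: the folder and resource-pool morefs are assumed inputs, and the spec fields are the plain vSphere SDK ones.

    def snapshot_and_linked_clone(session, vm_ref, clone_name,
                                  folder_ref, respool_ref):
        # 1. Snapshot the VM (no memory dump, no quiesce).
        snap_task = session.invoke_api(session.vim, "CreateSnapshot_Task",
                                       vm_ref, name="nova-shelve-snapshot",
                                       description="", memory=False,
                                       quiesce=False)
        snapshot_ref = session.wait_for_task(snap_task).result

        # 2. Clone from that snapshot; createNewChildDiskBacking makes the
        #    new VM's disks children of the snapshot, i.e. a linked clone.
        factory = session.vim.client.factory
        rel_spec = factory.create('ns0:VirtualMachineRelocateSpec')
        rel_spec.pool = respool_ref
        rel_spec.diskMoveType = 'createNewChildDiskBacking'

        clone_spec = factory.create('ns0:VirtualMachineCloneSpec')
        clone_spec.location = rel_spec
        clone_spec.snapshot = snapshot_ref
        clone_spec.powerOn = False
        clone_spec.template = False  # required field in the clone spec

        clone_task = session.invoke_api(session.vim, "CloneVM_Task", vm_ref,
                                        folder=folder_ref, name=clone_name,
                                        spec=clone_spec)
        return session.wait_for_task(clone_task).result  # moref of the clone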
[ 1137.419020] env[69994]: DEBUG nova.scheduler.client.report [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1137.468531] env[69994]: DEBUG nova.compute.manager [req-3abd4254-e51f-40ea-a919-0f89f610f920 req-1c78a464-1255-4287-9ba8-522f25e8d544 service nova] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Received event network-vif-deleted-a40388ce-c3c1-480f-8e1e-160c56294eab {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1137.468717] env[69994]: DEBUG nova.compute.manager [req-3abd4254-e51f-40ea-a919-0f89f610f920 req-1c78a464-1255-4287-9ba8-522f25e8d544 service nova] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Received event network-vif-deleted-cd532a29-1d4e-4026-89d2-9ef034f808a1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1137.494324] env[69994]: DEBUG nova.compute.manager [req-8ddb894d-64dd-4999-a75c-83bf044358de req-c85aee8e-4ecf-4476-a065-627bdb916563 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Received event network-changed-be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1137.494780] env[69994]: DEBUG nova.compute.manager [req-8ddb894d-64dd-4999-a75c-83bf044358de req-c85aee8e-4ecf-4476-a065-627bdb916563 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Refreshing instance network info cache due to event network-changed-be3723ea-e18d-4908-bb9b-d8bbce5d3cee. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1137.494949] env[69994]: DEBUG oslo_concurrency.lockutils [req-8ddb894d-64dd-4999-a75c-83bf044358de req-c85aee8e-4ecf-4476-a065-627bdb916563 service nova] Acquiring lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.495373] env[69994]: DEBUG oslo_concurrency.lockutils [req-8ddb894d-64dd-4999-a75c-83bf044358de req-c85aee8e-4ecf-4476-a065-627bdb916563 service nova] Acquired lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.495373] env[69994]: DEBUG nova.network.neutron [req-8ddb894d-64dd-4999-a75c-83bf044358de req-c85aee8e-4ecf-4476-a065-627bdb916563 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Refreshing network info cache for port be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1137.548731] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926259, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.703768] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926260, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.805943] env[69994]: INFO nova.compute.manager [-] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Took 1.37 seconds to deallocate network for instance. [ 1137.900418] env[69994]: DEBUG oslo_concurrency.lockutils [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.929638] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.501s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.930302] env[69994]: DEBUG nova.compute.manager [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1137.935313] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.560s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.935671] env[69994]: INFO nova.compute.claims [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1138.046584] env[69994]: DEBUG oslo_vmware.api [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926259, 'name': PowerOnVM_Task, 'duration_secs': 0.574345} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.047327] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1138.047327] env[69994]: INFO nova.compute.manager [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Took 7.82 seconds to spawn the instance on the hypervisor. [ 1138.047522] env[69994]: DEBUG nova.compute.manager [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1138.048263] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ad891f-8da1-4f70-bf89-3b7045f661f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.204027] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926260, 'name': CreateSnapshot_Task, 'duration_secs': 0.858973} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.204373] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1138.205137] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66e1770-5160-431b-845f-e9360e091e19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.246471] env[69994]: DEBUG nova.network.neutron [req-8ddb894d-64dd-4999-a75c-83bf044358de req-c85aee8e-4ecf-4476-a065-627bdb916563 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updated VIF entry in instance network info cache for port be3723ea-e18d-4908-bb9b-d8bbce5d3cee. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1138.246889] env[69994]: DEBUG nova.network.neutron [req-8ddb894d-64dd-4999-a75c-83bf044358de req-c85aee8e-4ecf-4476-a065-627bdb916563 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updating instance_info_cache with network_info: [{"id": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "address": "fa:16:3e:3a:fd:a2", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3723ea-e1", "ovs_interfaceid": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.314812] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.409984] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "395a4d39-29ae-4443-949f-4737e7e2341e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.410245] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "395a4d39-29ae-4443-949f-4737e7e2341e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.440048] env[69994]: DEBUG nova.compute.utils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1138.444581] env[69994]: DEBUG nova.compute.manager [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 
tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Not allocating networking since 'none' was specified. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1138.568218] env[69994]: INFO nova.compute.manager [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Took 18.93 seconds to build instance. [ 1138.729541] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1138.729950] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c27cca07-0367-45d7-9812-a96c72ef7526 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.740035] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1138.740035] env[69994]: value = "task-2926261" [ 1138.740035] env[69994]: _type = "Task" [ 1138.740035] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.751511] env[69994]: DEBUG oslo_concurrency.lockutils [req-8ddb894d-64dd-4999-a75c-83bf044358de req-c85aee8e-4ecf-4476-a065-627bdb916563 service nova] Releasing lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.752078] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926261, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.913426] env[69994]: DEBUG nova.compute.manager [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1138.945140] env[69994]: DEBUG nova.compute.manager [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1139.071104] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2221c24d-72f0-457f-aa73-a82e5ebf054b tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.462s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.183318] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b1d388-fa2f-4eab-a378-433e18f42ed9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.191137] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b2007a-f0eb-443d-864a-e6a4115e8d67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.223561] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abbe675d-ba1f-4fc2-b129-6b06c6adc2d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.231364] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73abe4ab-8c6a-411e-a247-737b52f73d8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.245053] env[69994]: DEBUG nova.compute.provider_tree [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1139.254549] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926261, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.436733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.553862] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36063d9e-d30c-4c3a-be24-5f695d12f874 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.554231] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36063d9e-d30c-4c3a-be24-5f695d12f874 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.554444] env[69994]: DEBUG nova.compute.manager [None req-36063d9e-d30c-4c3a-be24-5f695d12f874 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1139.555411] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2944b8a-dfde-486a-aff6-9c78dbce2a90 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.563896] env[69994]: DEBUG nova.compute.manager [None req-36063d9e-d30c-4c3a-be24-5f695d12f874 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1139.564481] env[69994]: DEBUG nova.objects.instance [None req-36063d9e-d30c-4c3a-be24-5f695d12f874 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lazy-loading 'flavor' on Instance uuid f98078e1-ee72-4bdb-aebf-405ffbb7900d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.751367] env[69994]: DEBUG nova.scheduler.client.report [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1139.758149] 
env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926261, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.959270] env[69994]: DEBUG nova.compute.manager [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1139.987250] env[69994]: DEBUG nova.virt.hardware [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.987542] env[69994]: DEBUG nova.virt.hardware [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.987692] env[69994]: DEBUG nova.virt.hardware [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.987879] env[69994]: DEBUG nova.virt.hardware [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.988045] env[69994]: DEBUG nova.virt.hardware [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.988201] env[69994]: DEBUG nova.virt.hardware [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.988532] env[69994]: DEBUG nova.virt.hardware [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 
tempest-ServersAaction247Test-502340419-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.988776] env[69994]: DEBUG nova.virt.hardware [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.988974] env[69994]: DEBUG nova.virt.hardware [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.989214] env[69994]: DEBUG nova.virt.hardware [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.989591] env[69994]: DEBUG nova.virt.hardware [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.990540] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ea16b0-9027-4b21-9183-d63853a3103a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.998852] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6797bc0e-c872-47d2-a1bb-396dbe3d277c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.012467] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1140.018096] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Creating folder: Project (e9d62d6d2f204d3287b54c9d84c018ab). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1140.018400] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e9dcac90-0efa-4c6e-bee9-dd419e096ae0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.029839] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Created folder: Project (e9d62d6d2f204d3287b54c9d84c018ab) in parent group-v587342. 
[ 1140.030037] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Creating folder: Instances. Parent ref: group-v587631. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1140.030290] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7c6bd26-3d43-4f7e-909c-b2d9c0ae5f22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.039537] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Created folder: Instances in parent group-v587631. [ 1140.039770] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1140.039962] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1140.040256] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7aa7553e-07d3-48f9-8a15-61e5ad31ff5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.056676] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1140.056676] env[69994]: value = "task-2926264" [ 1140.056676] env[69994]: _type = "Task" [ 1140.056676] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.064202] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926264, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.256224] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926261, 'name': CloneVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.260301] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.260909] env[69994]: DEBUG nova.compute.manager [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1140.263804] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.445s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.264754] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.267158] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.086s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.269048] env[69994]: INFO nova.compute.claims [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1140.293715] env[69994]: INFO nova.scheduler.client.report [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleted allocations for instance f0b77732-aae1-4790-a2c7-75586e78eda6 [ 1140.566407] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926264, 'name': CreateVM_Task, 'duration_secs': 0.320164} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.566630] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1140.567092] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.567272] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.567599] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1140.567857] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c695ee85-57fd-46f5-b0a9-45f0a2170df0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.571246] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-36063d9e-d30c-4c3a-be24-5f695d12f874 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1140.572021] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b91b56ad-97ed-4c39-a979-016f331c8ecb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.574615] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Waiting for the task: (returnval){ [ 1140.574615] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b421cf-903c-4617-2399-bd81c5138d4b" [ 1140.574615] env[69994]: _type = "Task" [ 1140.574615] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.580045] env[69994]: DEBUG oslo_vmware.api [None req-36063d9e-d30c-4c3a-be24-5f695d12f874 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1140.580045] env[69994]: value = "task-2926265" [ 1140.580045] env[69994]: _type = "Task" [ 1140.580045] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.586717] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b421cf-903c-4617-2399-bd81c5138d4b, 'name': SearchDatastore_Task, 'duration_secs': 0.010188} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.587507] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.587656] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1140.589061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.589061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.589061] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1140.591246] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c143746f-02a1-4c0f-b02c-cbac061875cc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.593359] env[69994]: DEBUG oslo_vmware.api [None req-36063d9e-d30c-4c3a-be24-5f695d12f874 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926265, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.599839] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1140.600050] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1140.600846] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2fee899-8e35-473f-a46d-11fefdef4247 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.605678] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Waiting for the task: (returnval){ [ 1140.605678] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c135b7-43b1-abe0-0a51-5a45ca1ff21c" [ 1140.605678] env[69994]: _type = "Task" [ 1140.605678] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.613683] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c135b7-43b1-abe0-0a51-5a45ca1ff21c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.756037] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926261, 'name': CloneVM_Task, 'duration_secs': 1.701073} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.756209] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Created linked-clone VM from snapshot [ 1140.757189] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255ee1da-5f94-4bd1-af11-6bc4a1667ded {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.764491] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Uploading image 757c5b7a-e993-4b3d-811b-7c0824b6c981 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1140.774012] env[69994]: DEBUG nova.compute.utils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1140.778346] env[69994]: DEBUG nova.compute.manager [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1140.778346] env[69994]: DEBUG nova.network.neutron [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1140.793538] env[69994]: DEBUG oslo_vmware.rw_handles [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1140.793538] env[69994]: value = "vm-587630" [ 1140.793538] env[69994]: _type = "VirtualMachine" [ 1140.793538] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1140.794189] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3d946680-ac48-41e4-9686-6b9e859680c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.801747] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55169114-c446-4054-812c-22f440849aa5 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "f0b77732-aae1-4790-a2c7-75586e78eda6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.895s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.804277] env[69994]: DEBUG oslo_vmware.rw_handles [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lease: (returnval){ [ 1140.804277] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52031011-4261-6165-f27f-d08fb7587e4d" [ 1140.804277] env[69994]: _type = "HttpNfcLease" [ 1140.804277] env[69994]: } obtained for exporting VM: (result){ [ 1140.804277] env[69994]: value = "vm-587630" [ 1140.804277] env[69994]: _type = "VirtualMachine" [ 1140.804277] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1140.804616] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the lease: (returnval){ [ 1140.804616] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52031011-4261-6165-f27f-d08fb7587e4d" [ 1140.804616] env[69994]: _type = "HttpNfcLease" [ 1140.804616] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1140.813507] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1140.813507] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52031011-4261-6165-f27f-d08fb7587e4d" [ 1140.813507] env[69994]: _type = "HttpNfcLease" [ 1140.813507] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1140.822076] env[69994]: DEBUG nova.policy [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d6a1603506e4d48a9d2f8bf61475821', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f602778aac0d41c49e73c2450f31d711', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1141.090457] env[69994]: DEBUG oslo_vmware.api [None req-36063d9e-d30c-4c3a-be24-5f695d12f874 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926265, 'name': PowerOffVM_Task, 'duration_secs': 0.279589} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.090840] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-36063d9e-d30c-4c3a-be24-5f695d12f874 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1141.090977] env[69994]: DEBUG nova.compute.manager [None req-36063d9e-d30c-4c3a-be24-5f695d12f874 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1141.091825] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6bc967-8b45-4242-ad45-168b8c055c4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.115292] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c135b7-43b1-abe0-0a51-5a45ca1ff21c, 'name': SearchDatastore_Task, 'duration_secs': 0.00877} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.116121] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4322b005-b6e9-42f9-bbf0-c8d3d8f83737 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.121632] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Waiting for the task: (returnval){ [ 1141.121632] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f9b95f-a5cb-14a8-8023-1a1630dee461" [ 1141.121632] env[69994]: _type = "Task" [ 1141.121632] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.130291] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f9b95f-a5cb-14a8-8023-1a1630dee461, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.158153] env[69994]: DEBUG nova.network.neutron [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Successfully created port: f04394ab-b901-419c-9ec1-c1855524b7e5 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1141.282346] env[69994]: DEBUG nova.compute.manager [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1141.314753] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1141.314753] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52031011-4261-6165-f27f-d08fb7587e4d" [ 1141.314753] env[69994]: _type = "HttpNfcLease" [ 1141.314753] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1141.314753] env[69994]: DEBUG oslo_vmware.rw_handles [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1141.314753] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52031011-4261-6165-f27f-d08fb7587e4d" [ 1141.314753] env[69994]: _type = "HttpNfcLease" [ 1141.314753] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1141.315081] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4929a7e6-15dd-4915-80e2-f105c48d873d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.328933] env[69994]: DEBUG oslo_vmware.rw_handles [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52374769-274b-3020-431f-02558b8080bb/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1141.329732] env[69994]: DEBUG oslo_vmware.rw_handles [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52374769-274b-3020-431f-02558b8080bb/disk-0.vmdk for reading. 
{{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1141.468451] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "a828caf9-2b61-4449-b1ee-25f0828380d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.468770] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.474031] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-db435ed7-ea23-4826-8332-2ac71e40c640 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.590019] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee901e5e-84d5-43f4-b44d-43cde1259f52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.595604] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1ac68f-7001-442e-8739-420390e06d88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.640894] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a8fc3e-0706-4091-ad8f-5c5d79e324b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.644216] env[69994]: DEBUG oslo_concurrency.lockutils [None req-36063d9e-d30c-4c3a-be24-5f695d12f874 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.090s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.651616] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f9b95f-a5cb-14a8-8023-1a1630dee461, 'name': SearchDatastore_Task, 'duration_secs': 0.009732} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.654454] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.654747] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e53a4875-77e6-4a13-9a4e-004fe8014a85/e53a4875-77e6-4a13-9a4e-004fe8014a85.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1141.655074] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c854c64-25f5-4c33-bba7-d4da8c7487dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.658258] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef7522f-ba98-42bb-88bd-76026fb3a80f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.680307] env[69994]: DEBUG nova.compute.provider_tree [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1141.684362] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Waiting for the task: (returnval){ [ 1141.684362] env[69994]: value = "task-2926267" [ 1141.684362] env[69994]: _type = "Task" [ 1141.684362] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.697571] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926267, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.977546] env[69994]: DEBUG nova.compute.manager [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1142.130346] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "dd196e59-868b-409f-bddb-bb99b0c1092f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.130852] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.186853] env[69994]: DEBUG nova.scheduler.client.report [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1142.203274] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926267, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465086} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.204309] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] e53a4875-77e6-4a13-9a4e-004fe8014a85/e53a4875-77e6-4a13-9a4e-004fe8014a85.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1142.204309] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1142.205149] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f7132cf-3c08-49db-bcb9-2c4182805b52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.213932] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Waiting for the task: (returnval){ [ 1142.213932] env[69994]: value = "task-2926268" [ 1142.213932] env[69994]: _type = "Task" [ 1142.213932] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.225136] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926268, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.273594] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.274221] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.274221] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.274372] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.274652] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.277820] env[69994]: INFO nova.compute.manager [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Terminating instance [ 1142.292743] env[69994]: DEBUG nova.compute.manager [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1142.504172] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.556902] env[69994]: DEBUG nova.compute.manager [req-45cad6d3-6c62-4ea7-8a7a-e10608311c14 req-cffc3087-13e0-4edf-9555-62dc8edab958 service nova] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Received event network-vif-plugged-f04394ab-b901-419c-9ec1-c1855524b7e5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1142.557192] env[69994]: DEBUG oslo_concurrency.lockutils [req-45cad6d3-6c62-4ea7-8a7a-e10608311c14 req-cffc3087-13e0-4edf-9555-62dc8edab958 service nova] Acquiring lock "826489f7-081d-4a3e-8a05-62d902849a61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.557993] env[69994]: DEBUG oslo_concurrency.lockutils [req-45cad6d3-6c62-4ea7-8a7a-e10608311c14 req-cffc3087-13e0-4edf-9555-62dc8edab958 service nova] Lock "826489f7-081d-4a3e-8a05-62d902849a61-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.558387] env[69994]: DEBUG oslo_concurrency.lockutils [req-45cad6d3-6c62-4ea7-8a7a-e10608311c14 req-cffc3087-13e0-4edf-9555-62dc8edab958 service nova] Lock "826489f7-081d-4a3e-8a05-62d902849a61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.560073] env[69994]: DEBUG nova.compute.manager [req-45cad6d3-6c62-4ea7-8a7a-e10608311c14 req-cffc3087-13e0-4edf-9555-62dc8edab958 service nova] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] No waiting events found dispatching network-vif-plugged-f04394ab-b901-419c-9ec1-c1855524b7e5 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1142.560073] env[69994]: WARNING nova.compute.manager [req-45cad6d3-6c62-4ea7-8a7a-e10608311c14 req-cffc3087-13e0-4edf-9555-62dc8edab958 service nova] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Received unexpected event network-vif-plugged-f04394ab-b901-419c-9ec1-c1855524b7e5 for instance with vm_state building and task_state spawning. 
[ 1142.633997] env[69994]: DEBUG nova.compute.utils [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1142.657303] env[69994]: DEBUG nova.network.neutron [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Successfully updated port: f04394ab-b901-419c-9ec1-c1855524b7e5 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1142.696250] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.696823] env[69994]: DEBUG nova.compute.manager [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1142.699943] env[69994]: DEBUG oslo_concurrency.lockutils [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.800s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.700381] env[69994]: DEBUG nova.objects.instance [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Lazy-loading 'resources' on Instance uuid 489b68f2-c2f2-4710-a06f-45ad8c577441 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1142.723866] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926268, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068929} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.725830] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1142.725830] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5f0523-8e99-488c-a68b-6ef145cdad96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.747188] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] e53a4875-77e6-4a13-9a4e-004fe8014a85/e53a4875-77e6-4a13-9a4e-004fe8014a85.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1142.748831] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71e5191d-ed2e-4041-9e57-f0fecf5f3c0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.769651] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Waiting for the task: (returnval){ [ 1142.769651] env[69994]: value = "task-2926269" [ 1142.769651] env[69994]: _type = "Task" [ 1142.769651] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.778436] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926269, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.782640] env[69994]: DEBUG nova.compute.manager [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1142.783143] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1142.784199] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb8ac8c-a446-4a20-aea4-eceaa32b8821 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.792117] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1142.792500] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae445ebe-c80e-4e0c-8cc9-d709e96ccb63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.865650] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1142.865911] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1142.866123] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleting the datastore file [datastore1] f98078e1-ee72-4bdb-aebf-405ffbb7900d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1142.866459] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7469b9d-0da0-4526-ab67-c5aa0cd1806a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.872702] env[69994]: DEBUG oslo_vmware.api [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1142.872702] env[69994]: value = "task-2926271" [ 1142.872702] env[69994]: _type = "Task" [ 1142.872702] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.881018] env[69994]: DEBUG oslo_vmware.api [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926271, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.137276] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.161145] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "refresh_cache-826489f7-081d-4a3e-8a05-62d902849a61" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.161357] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "refresh_cache-826489f7-081d-4a3e-8a05-62d902849a61" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.161629] env[69994]: DEBUG nova.network.neutron [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1143.204230] env[69994]: DEBUG nova.compute.utils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1143.207331] env[69994]: DEBUG nova.compute.manager [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1143.207331] env[69994]: DEBUG nova.network.neutron [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1143.256042] env[69994]: DEBUG nova.policy [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de3fba71299348fab70f6e21e1028bb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0f5bb040f474df19739d5170639ff67', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1143.281183] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926269, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.388579] env[69994]: DEBUG oslo_vmware.api [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926271, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.312672} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.391627] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1143.391835] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1143.392034] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1143.392290] env[69994]: INFO nova.compute.manager [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1143.392608] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1143.393126] env[69994]: DEBUG nova.compute.manager [-] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1143.393409] env[69994]: DEBUG nova.network.neutron [-] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1143.462939] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891c9ac5-a658-43e9-90e8-64866e8c364e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.471688] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb31a13c-fc09-4e11-9fb3-0df5a73d9324 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.504553] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b680e6-6e11-406e-9376-fac64ee65c47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.513102] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00325695-b171-4b84-a95c-ff7c9829f223 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.528553] env[69994]: DEBUG nova.compute.provider_tree [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1143.666273] env[69994]: DEBUG nova.network.neutron [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Successfully created port: 9e54b10f-7c32-425e-b571-b3602df12045 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1143.707439] env[69994]: DEBUG nova.compute.manager [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1143.718289] env[69994]: DEBUG nova.network.neutron [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1143.784425] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926269, 'name': ReconfigVM_Task, 'duration_secs': 0.519188} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.785671] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Reconfigured VM instance instance-0000006a to attach disk [datastore1] e53a4875-77e6-4a13-9a4e-004fe8014a85/e53a4875-77e6-4a13-9a4e-004fe8014a85.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1143.789045] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-544344b7-7ffc-445e-bf1a-722301767383 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.797384] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Waiting for the task: (returnval){ [ 1143.797384] env[69994]: value = "task-2926272" [ 1143.797384] env[69994]: _type = "Task" [ 1143.797384] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.808369] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926272, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.937542] env[69994]: DEBUG nova.network.neutron [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Updating instance_info_cache with network_info: [{"id": "f04394ab-b901-419c-9ec1-c1855524b7e5", "address": "fa:16:3e:b2:2c:e9", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf04394ab-b9", "ovs_interfaceid": "f04394ab-b901-419c-9ec1-c1855524b7e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.032313] env[69994]: DEBUG nova.scheduler.client.report [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1144.227320] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "dd196e59-868b-409f-bddb-bb99b0c1092f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.227668] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.227938] env[69994]: INFO nova.compute.manager [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 
dd196e59-868b-409f-bddb-bb99b0c1092f] Attaching volume 15ece227-346b-4a76-a2cf-05eb2b20d7b0 to /dev/sdb [ 1144.258474] env[69994]: DEBUG nova.network.neutron [-] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.261464] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a756a32-61df-4c01-ada1-cf59f4ead52d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.270493] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd4c3c8-bbc5-4d31-86b6-2f2b0f1ff2ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.285894] env[69994]: DEBUG nova.virt.block_device [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Updating existing volume attachment record: dcf66839-1d2c-488e-9c67-9368dcd9cb5c {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1144.311390] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926272, 'name': Rename_Task, 'duration_secs': 0.151753} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.311738] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1144.311997] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa96fae2-1166-4945-a737-ac66ba7df585 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.318557] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Waiting for the task: (returnval){ [ 1144.318557] env[69994]: value = "task-2926273" [ 1144.318557] env[69994]: _type = "Task" [ 1144.318557] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.329253] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926273, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.441425] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "refresh_cache-826489f7-081d-4a3e-8a05-62d902849a61" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.441788] env[69994]: DEBUG nova.compute.manager [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Instance network_info: |[{"id": "f04394ab-b901-419c-9ec1-c1855524b7e5", "address": "fa:16:3e:b2:2c:e9", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf04394ab-b9", "ovs_interfaceid": "f04394ab-b901-419c-9ec1-c1855524b7e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1144.538622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.839s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.541498] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.227s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.541746] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.543916] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" 
acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.107s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.545698] env[69994]: INFO nova.compute.claims [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1144.568716] env[69994]: INFO nova.scheduler.client.report [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted allocations for instance c7c17fab-71a4-44df-907e-f7b408f80236 [ 1144.573296] env[69994]: INFO nova.scheduler.client.report [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Deleted allocations for instance 489b68f2-c2f2-4710-a06f-45ad8c577441 [ 1144.596232] env[69994]: DEBUG nova.compute.manager [req-15c921d4-00c2-436c-93ec-07bb71bf2cce req-cbd0c7ba-cb5d-44e2-9074-08e26633124c service nova] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Received event network-changed-f04394ab-b901-419c-9ec1-c1855524b7e5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1144.596466] env[69994]: DEBUG nova.compute.manager [req-15c921d4-00c2-436c-93ec-07bb71bf2cce req-cbd0c7ba-cb5d-44e2-9074-08e26633124c service nova] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Refreshing instance network info cache due to event network-changed-f04394ab-b901-419c-9ec1-c1855524b7e5. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1144.596727] env[69994]: DEBUG oslo_concurrency.lockutils [req-15c921d4-00c2-436c-93ec-07bb71bf2cce req-cbd0c7ba-cb5d-44e2-9074-08e26633124c service nova] Acquiring lock "refresh_cache-826489f7-081d-4a3e-8a05-62d902849a61" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.596895] env[69994]: DEBUG oslo_concurrency.lockutils [req-15c921d4-00c2-436c-93ec-07bb71bf2cce req-cbd0c7ba-cb5d-44e2-9074-08e26633124c service nova] Acquired lock "refresh_cache-826489f7-081d-4a3e-8a05-62d902849a61" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.597109] env[69994]: DEBUG nova.network.neutron [req-15c921d4-00c2-436c-93ec-07bb71bf2cce req-cbd0c7ba-cb5d-44e2-9074-08e26633124c service nova] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Refreshing network info cache for port f04394ab-b901-419c-9ec1-c1855524b7e5 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1144.720080] env[69994]: DEBUG nova.compute.manager [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1144.766433] env[69994]: INFO nova.compute.manager [-] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Took 1.37 seconds to deallocate network for instance. 
[ 1144.830738] env[69994]: DEBUG oslo_vmware.api [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926273, 'name': PowerOnVM_Task, 'duration_secs': 0.440369} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.831062] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1144.831298] env[69994]: INFO nova.compute.manager [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Took 4.87 seconds to spawn the instance on the hypervisor. [ 1144.831490] env[69994]: DEBUG nova.compute.manager [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1144.832433] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7c9a84-2ed1-4bcd-92d5-707f064f3bff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.084720] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2a3662-efe3-428d-bb43-420f8affa209 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "c7c17fab-71a4-44df-907e-f7b408f80236" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.310s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.086173] env[69994]: DEBUG oslo_concurrency.lockutils [None req-041b7f63-c467-4675-a6b4-87a58c56a863 tempest-ServersTestManualDisk-1824507415 tempest-ServersTestManualDisk-1824507415-project-member] Lock "489b68f2-c2f2-4710-a06f-45ad8c577441" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.253s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.272781] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.335157] env[69994]: DEBUG nova.network.neutron [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Successfully updated port: 9e54b10f-7c32-425e-b571-b3602df12045 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1145.353334] env[69994]: INFO nova.compute.manager [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 
tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Took 9.94 seconds to build instance. [ 1145.528762] env[69994]: DEBUG nova.network.neutron [req-15c921d4-00c2-436c-93ec-07bb71bf2cce req-cbd0c7ba-cb5d-44e2-9074-08e26633124c service nova] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Updated VIF entry in instance network info cache for port f04394ab-b901-419c-9ec1-c1855524b7e5. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1145.529245] env[69994]: DEBUG nova.network.neutron [req-15c921d4-00c2-436c-93ec-07bb71bf2cce req-cbd0c7ba-cb5d-44e2-9074-08e26633124c service nova] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Updating instance_info_cache with network_info: [{"id": "f04394ab-b901-419c-9ec1-c1855524b7e5", "address": "fa:16:3e:b2:2c:e9", "network": {"id": "1b7faf80-7e16-4c91-84cc-6563a0c24b62", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-724560688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f602778aac0d41c49e73c2450f31d711", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf04394ab-b9", "ovs_interfaceid": "f04394ab-b901-419c-9ec1-c1855524b7e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.789701] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7012c324-91ef-4076-8f6b-786f41ec47ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.801350] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65c39bd-9029-41b1-990a-feec56112115 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.835206] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098695e2-33a6-43c0-8b97-cf8aa300e1a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.838044] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "refresh_cache-6b29cefb-8f86-4826-a1c9-873fd48c53a7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.838200] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "refresh_cache-6b29cefb-8f86-4826-a1c9-873fd48c53a7" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1145.838490] env[69994]: DEBUG nova.network.neutron [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1145.845156] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b16abd4-2fa2-4f10-920b-0469f2600ffd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.860164] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d165d69-304f-4119-a7c5-d4af25142711 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Lock "e53a4875-77e6-4a13-9a4e-004fe8014a85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.459s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.860792] env[69994]: DEBUG nova.compute.provider_tree [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.971843] env[69994]: DEBUG nova.virt.hardware [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1145.972117] env[69994]: DEBUG nova.virt.hardware [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1145.972279] env[69994]: DEBUG nova.virt.hardware [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1145.972462] env[69994]: DEBUG nova.virt.hardware [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Flavor pref 0:0:0 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1145.972611] env[69994]: DEBUG nova.virt.hardware [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1145.972806] env[69994]: DEBUG nova.virt.hardware [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1145.973090] env[69994]: DEBUG nova.virt.hardware [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1145.973265] env[69994]: DEBUG nova.virt.hardware [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1145.973433] env[69994]: DEBUG nova.virt.hardware [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1145.973597] env[69994]: DEBUG nova.virt.hardware [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1145.973772] env[69994]: DEBUG nova.virt.hardware [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1145.975910] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0414eed1-fbc1-4f88-afd5-1c041ca70dbc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.986794] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63b4f30-accc-49b6-9957-00210c5cf901 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.001019] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:2c:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'f04394ab-b901-419c-9ec1-c1855524b7e5', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1146.008708] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1146.009399] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1146.009634] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45ebf3ce-db2d-428b-9f07-a3413b85b406 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.030137] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1146.030137] env[69994]: value = "task-2926275" [ 1146.030137] env[69994]: _type = "Task" [ 1146.030137] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.034050] env[69994]: DEBUG oslo_concurrency.lockutils [req-15c921d4-00c2-436c-93ec-07bb71bf2cce req-cbd0c7ba-cb5d-44e2-9074-08e26633124c service nova] Releasing lock "refresh_cache-826489f7-081d-4a3e-8a05-62d902849a61" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.034309] env[69994]: DEBUG nova.compute.manager [req-15c921d4-00c2-436c-93ec-07bb71bf2cce req-cbd0c7ba-cb5d-44e2-9074-08e26633124c service nova] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Received event network-vif-deleted-467676c7-4310-48fe-845b-cf6e75e26d14 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1146.039575] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926275, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.290021] env[69994]: DEBUG nova.virt.hardware [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1146.290021] env[69994]: DEBUG nova.virt.hardware [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1146.290021] env[69994]: DEBUG nova.virt.hardware [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1146.290021] env[69994]: DEBUG nova.virt.hardware [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1146.290021] env[69994]: DEBUG nova.virt.hardware [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1146.290021] env[69994]: DEBUG nova.virt.hardware [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1146.290021] env[69994]: DEBUG nova.virt.hardware [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1146.290021] env[69994]: DEBUG nova.virt.hardware [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1146.290021] env[69994]: DEBUG nova.virt.hardware [None 
req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1146.290021] env[69994]: DEBUG nova.virt.hardware [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1146.290422] env[69994]: DEBUG nova.virt.hardware [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1146.293190] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40467f3-748c-4c2c-8284-bc84c4e797d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.305564] env[69994]: DEBUG oslo_vmware.rw_handles [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52282bb2-0059-82f0-3a9c-5f88710a6831/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1146.306929] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8468880-7db5-405a-bcdd-f29a0a73266c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.312560] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c06662-8cee-4797-a0dc-bf079900465c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.332017] env[69994]: DEBUG oslo_vmware.rw_handles [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52282bb2-0059-82f0-3a9c-5f88710a6831/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1146.333636] env[69994]: ERROR oslo_vmware.rw_handles [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52282bb2-0059-82f0-3a9c-5f88710a6831/disk-0.vmdk due to incomplete transfer. 
[ 1146.334953] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8a9872eb-4022-4991-b7e1-56145aca8791 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.344939] env[69994]: DEBUG oslo_vmware.rw_handles [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52282bb2-0059-82f0-3a9c-5f88710a6831/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1146.345271] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Uploaded image 80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1146.348488] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1146.348590] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f754e14a-27a8-40f4-be4c-c01f2aea5e61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.362242] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1146.362242] env[69994]: value = "task-2926276" [ 1146.362242] env[69994]: _type = "Task" [ 1146.362242] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.366020] env[69994]: DEBUG nova.scheduler.client.report [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1146.376672] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926276, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.403448] env[69994]: DEBUG nova.network.neutron [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1146.541189] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926275, 'name': CreateVM_Task, 'duration_secs': 0.412473} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.541505] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1146.542261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.542545] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1146.542836] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1146.543164] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62569bef-3f0a-4733-867c-7527c62f2685 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.550552] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1146.550552] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523b225b-031c-ccf0-2507-03d374c9d952" [ 1146.550552] env[69994]: _type = "Task" [ 1146.550552] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.562032] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523b225b-031c-ccf0-2507-03d374c9d952, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.625768] env[69994]: DEBUG nova.compute.manager [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Received event network-vif-plugged-9e54b10f-7c32-425e-b571-b3602df12045 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1146.625768] env[69994]: DEBUG oslo_concurrency.lockutils [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] Acquiring lock "6b29cefb-8f86-4826-a1c9-873fd48c53a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.625768] env[69994]: DEBUG oslo_concurrency.lockutils [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] Lock "6b29cefb-8f86-4826-a1c9-873fd48c53a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.625768] env[69994]: DEBUG oslo_concurrency.lockutils [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] Lock "6b29cefb-8f86-4826-a1c9-873fd48c53a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.625768] env[69994]: DEBUG nova.compute.manager [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] No waiting events found dispatching network-vif-plugged-9e54b10f-7c32-425e-b571-b3602df12045 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1146.625768] env[69994]: WARNING nova.compute.manager [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Received unexpected event network-vif-plugged-9e54b10f-7c32-425e-b571-b3602df12045 for instance with vm_state building and task_state spawning. [ 1146.625768] env[69994]: DEBUG nova.compute.manager [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Received event network-changed-9e54b10f-7c32-425e-b571-b3602df12045 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1146.625768] env[69994]: DEBUG nova.compute.manager [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Refreshing instance network info cache due to event network-changed-9e54b10f-7c32-425e-b571-b3602df12045. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1146.625768] env[69994]: DEBUG oslo_concurrency.lockutils [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] Acquiring lock "refresh_cache-6b29cefb-8f86-4826-a1c9-873fd48c53a7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.705148] env[69994]: DEBUG nova.network.neutron [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Updating instance_info_cache with network_info: [{"id": "9e54b10f-7c32-425e-b571-b3602df12045", "address": "fa:16:3e:bb:72:c9", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e54b10f-7c", "ovs_interfaceid": "9e54b10f-7c32-425e-b571-b3602df12045", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.834935] env[69994]: DEBUG nova.compute.manager [None req-fb3ce9b4-3103-4d35-9e06-1ba03d784c58 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1146.836150] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b332bc05-2cc8-4cd0-8539-73a009e2bde4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.870916] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926276, 'name': Destroy_Task} progress is 33%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.875283] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.329s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.875283] env[69994]: DEBUG nova.compute.manager [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1146.882323] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.378s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.883943] env[69994]: INFO nova.compute.claims [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1146.966763] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Acquiring lock "e53a4875-77e6-4a13-9a4e-004fe8014a85" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.966763] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Lock "e53a4875-77e6-4a13-9a4e-004fe8014a85" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.966763] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Acquiring lock "e53a4875-77e6-4a13-9a4e-004fe8014a85-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.966763] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Lock "e53a4875-77e6-4a13-9a4e-004fe8014a85-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.966763] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Lock "e53a4875-77e6-4a13-9a4e-004fe8014a85-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.968481] env[69994]: INFO nova.compute.manager [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Terminating instance [ 1147.061195] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523b225b-031c-ccf0-2507-03d374c9d952, 'name': SearchDatastore_Task, 'duration_secs': 0.016633} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.061662] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1147.062018] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1147.062391] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.063722] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.064058] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1147.065432] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30f5c4a0-6197-48ba-9e08-fbc1ca8d8b3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.078706] env[69994]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1147.078911] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1147.079604] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21319c9f-9702-4302-8d23-0985055f295e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.088021] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1147.088021] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526e2e71-ee55-ad28-6780-169e9c149386" [ 1147.088021] env[69994]: _type = "Task" [ 1147.088021] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.094258] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526e2e71-ee55-ad28-6780-169e9c149386, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.208213] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "refresh_cache-6b29cefb-8f86-4826-a1c9-873fd48c53a7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1147.208677] env[69994]: DEBUG nova.compute.manager [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Instance network_info: |[{"id": "9e54b10f-7c32-425e-b571-b3602df12045", "address": "fa:16:3e:bb:72:c9", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e54b10f-7c", "ovs_interfaceid": "9e54b10f-7c32-425e-b571-b3602df12045", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1147.209026] env[69994]: DEBUG oslo_concurrency.lockutils [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] Acquired lock "refresh_cache-6b29cefb-8f86-4826-a1c9-873fd48c53a7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.209231] env[69994]: DEBUG nova.network.neutron [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Refreshing network info cache for port 9e54b10f-7c32-425e-b571-b3602df12045 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1147.210581] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:72:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e54b10f-7c32-425e-b571-b3602df12045', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1147.220516] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 
tempest-DeleteServersTestJSON-409940019-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1147.221736] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1147.221964] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f96e140-d861-4cc0-ac09-46a4e15ca00c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.244243] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1147.244243] env[69994]: value = "task-2926278" [ 1147.244243] env[69994]: _type = "Task" [ 1147.244243] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.253158] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926278, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.347316] env[69994]: INFO nova.compute.manager [None req-fb3ce9b4-3103-4d35-9e06-1ba03d784c58 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] instance snapshotting [ 1147.348055] env[69994]: DEBUG nova.objects.instance [None req-fb3ce9b4-3103-4d35-9e06-1ba03d784c58 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Lazy-loading 'flavor' on Instance uuid e53a4875-77e6-4a13-9a4e-004fe8014a85 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.375174] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926276, 'name': Destroy_Task, 'duration_secs': 0.562893} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.375471] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Destroyed the VM [ 1147.375719] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1147.376030] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b43238f9-c117-47b2-b112-63c2c521d11d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.384205] env[69994]: DEBUG nova.compute.utils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1147.386287] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1147.386287] env[69994]: value = "task-2926279" [ 1147.386287] env[69994]: _type = "Task" [ 1147.386287] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.387218] env[69994]: DEBUG nova.compute.manager [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1147.387509] env[69994]: DEBUG nova.network.neutron [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1147.404791] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926279, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.456496] env[69994]: DEBUG nova.policy [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9c7ff94bd744305a13df72dbf967c11', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66d57a69e0924b9abc2cc4e67fc8173c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1147.472731] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Acquiring lock "refresh_cache-e53a4875-77e6-4a13-9a4e-004fe8014a85" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.473017] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Acquired lock "refresh_cache-e53a4875-77e6-4a13-9a4e-004fe8014a85" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.473296] env[69994]: DEBUG nova.network.neutron [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1147.596683] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]526e2e71-ee55-ad28-6780-169e9c149386, 'name': SearchDatastore_Task, 'duration_secs': 0.019095} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.597607] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68fa22db-a092-4c06-89cb-02fcb487e751 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.603299] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1147.603299] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529ae763-8ca7-c904-0983-e95726387f3d" [ 1147.603299] env[69994]: _type = "Task" [ 1147.603299] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.611634] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529ae763-8ca7-c904-0983-e95726387f3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.756957] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926278, 'name': CreateVM_Task, 'duration_secs': 0.347435} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.757318] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1147.759729] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.759729] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.759729] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1147.764166] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e41a6aea-0030-4048-8ddf-86f79e22687f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.768949] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1147.768949] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528628a8-647f-6bb7-c401-0e99eceb2948" [ 1147.768949] env[69994]: _type = "Task" [ 1147.768949] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.778848] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528628a8-647f-6bb7-c401-0e99eceb2948, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.854691] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0c3af1-bba3-4ae0-b569-8624662b3991 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.876031] env[69994]: DEBUG nova.network.neutron [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Successfully created port: 7608b3ed-dbf1-48c0-a088-071f08980220 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1147.884132] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c7eea4-8bb3-433a-a3d7-9ae08d79ca11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.888101] env[69994]: DEBUG nova.compute.manager [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1147.919623] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926279, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.971556] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.971853] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.001158] env[69994]: DEBUG nova.network.neutron [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1148.104426] env[69994]: DEBUG nova.network.neutron [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.112298] env[69994]: DEBUG nova.network.neutron [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Updated VIF entry in instance network info cache for port 9e54b10f-7c32-425e-b571-b3602df12045. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1148.112298] env[69994]: DEBUG nova.network.neutron [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Updating instance_info_cache with network_info: [{"id": "9e54b10f-7c32-425e-b571-b3602df12045", "address": "fa:16:3e:bb:72:c9", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e54b10f-7c", "ovs_interfaceid": "9e54b10f-7c32-425e-b571-b3602df12045", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.116200] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529ae763-8ca7-c904-0983-e95726387f3d, 'name': SearchDatastore_Task, 'duration_secs': 0.016878} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.117145] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.117404] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 826489f7-081d-4a3e-8a05-62d902849a61/826489f7-081d-4a3e-8a05-62d902849a61.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1148.117669] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-acbf98ce-51fa-490d-b2d0-4086531cd699 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.131278] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1148.131278] env[69994]: value = "task-2926280" [ 1148.131278] env[69994]: _type = "Task" [ 1148.131278] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.144106] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926280, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.164625] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4484f25b-a8ed-4bbe-8489-c3c13d84eff1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.173249] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd45e5f7-d218-4b15-9367-865d700e18fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.210057] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560f5ee1-2f8e-46e7-a916-d6c07a115cd1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.219052] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1387a418-c4b2-40dc-9e5e-8023f75ab434 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.235412] env[69994]: DEBUG nova.compute.provider_tree [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1148.280627] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528628a8-647f-6bb7-c401-0e99eceb2948, 'name': SearchDatastore_Task, 'duration_secs': 0.022055} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.281030] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.281319] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1148.281599] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.281802] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.282051] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1148.282355] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e77570f-a840-4526-8773-7c513150d980 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.293478] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1148.293727] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1148.294623] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f41906dd-c378-4dca-8584-d0d8519fb5c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.300779] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1148.300779] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52123415-05a8-f4ec-071e-e210b94f3754" [ 1148.300779] env[69994]: _type = "Task" [ 1148.300779] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.309828] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52123415-05a8-f4ec-071e-e210b94f3754, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.407237] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926279, 'name': RemoveSnapshot_Task, 'duration_secs': 0.864051} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.407596] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1148.407901] env[69994]: DEBUG nova.compute.manager [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1148.408813] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44bfaea3-dea3-4654-9f5c-bf779c1aec62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.412419] env[69994]: DEBUG nova.compute.manager [None req-fb3ce9b4-3103-4d35-9e06-1ba03d784c58 tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Instance disappeared during snapshot {{(pid=69994) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1148.480291] env[69994]: DEBUG nova.compute.utils [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1148.562395] env[69994]: DEBUG nova.compute.manager [None req-fb3ce9b4-3103-4d35-9e06-1ba03d784c58 
tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Found 0 images (rotation: 2) {{(pid=69994) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1148.610190] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Releasing lock "refresh_cache-e53a4875-77e6-4a13-9a4e-004fe8014a85" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.610741] env[69994]: DEBUG nova.compute.manager [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1148.610957] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1148.612112] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde3d538-8ab1-4e29-898a-d1f1903b765f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.617581] env[69994]: DEBUG oslo_concurrency.lockutils [req-b85479b2-6088-43cc-8f6f-bbafb7d5ac09 req-a6d5ca87-978e-4dce-9d99-2133a41f2b18 service nova] Releasing lock "refresh_cache-6b29cefb-8f86-4826-a1c9-873fd48c53a7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.625302] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1148.625678] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ecbb5be0-7fcd-4062-ba46-011663825c9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.643284] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926280, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.644672] env[69994]: DEBUG oslo_vmware.api [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Waiting for the task: (returnval){ [ 1148.644672] env[69994]: value = "task-2926281" [ 1148.644672] env[69994]: _type = "Task" [ 1148.644672] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.654814] env[69994]: DEBUG oslo_vmware.api [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926281, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.739056] env[69994]: DEBUG nova.scheduler.client.report [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1148.812484] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52123415-05a8-f4ec-071e-e210b94f3754, 'name': SearchDatastore_Task, 'duration_secs': 0.02166} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.813329] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1781ef4-d8ed-4289-909b-96426e6467e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.820095] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1148.820095] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5254fd6f-c9c6-6188-46eb-50b83affb3ca" [ 1148.820095] env[69994]: _type = "Task" [ 1148.820095] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.829680] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5254fd6f-c9c6-6188-46eb-50b83affb3ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.836454] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1148.836759] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587634', 'volume_id': '15ece227-346b-4a76-a2cf-05eb2b20d7b0', 'name': 'volume-15ece227-346b-4a76-a2cf-05eb2b20d7b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'dd196e59-868b-409f-bddb-bb99b0c1092f', 'attached_at': '', 'detached_at': '', 'volume_id': '15ece227-346b-4a76-a2cf-05eb2b20d7b0', 'serial': '15ece227-346b-4a76-a2cf-05eb2b20d7b0'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1148.837691] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9830a821-4f80-4662-958b-2fab9810ad30 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.854579] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa88797c-d376-4dc6-89c8-b48c2f579fe6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.880712] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] volume-15ece227-346b-4a76-a2cf-05eb2b20d7b0/volume-15ece227-346b-4a76-a2cf-05eb2b20d7b0.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1148.881062] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25b9c11d-b95b-4d06-a861-e9a0e4cf5414 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.902425] env[69994]: DEBUG nova.compute.manager [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1148.904532] env[69994]: DEBUG oslo_vmware.api [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1148.904532] env[69994]: value = "task-2926282" [ 1148.904532] env[69994]: _type = "Task" [ 1148.904532] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.913689] env[69994]: DEBUG oslo_vmware.api [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926282, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.923815] env[69994]: INFO nova.compute.manager [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Shelve offloading [ 1148.930340] env[69994]: DEBUG nova.virt.hardware [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1148.930699] env[69994]: DEBUG nova.virt.hardware [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1148.930909] env[69994]: DEBUG nova.virt.hardware [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1148.931127] env[69994]: DEBUG nova.virt.hardware [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1148.931322] env[69994]: DEBUG nova.virt.hardware [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1148.931557] env[69994]: DEBUG nova.virt.hardware [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1148.931849] env[69994]: DEBUG nova.virt.hardware [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1148.932034] env[69994]: DEBUG nova.virt.hardware 
[None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1148.932228] env[69994]: DEBUG nova.virt.hardware [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1148.932441] env[69994]: DEBUG nova.virt.hardware [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1148.932717] env[69994]: DEBUG nova.virt.hardware [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1148.933825] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2135045b-2b01-4192-8511-ac77cd7ecc6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.944457] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6da344-d042-4390-bd4c-0722c9531c2a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.985633] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.014s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.143856] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926280, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63399} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.144213] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 826489f7-081d-4a3e-8a05-62d902849a61/826489f7-081d-4a3e-8a05-62d902849a61.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1149.144432] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1149.144700] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6bd5a277-1939-4aa2-b409-1b16fb5fdc22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.156796] env[69994]: DEBUG oslo_vmware.api [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926281, 'name': PowerOffVM_Task, 'duration_secs': 0.211375} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.158252] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1149.158484] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1149.158870] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1149.158870] env[69994]: value = "task-2926283" [ 1149.158870] env[69994]: _type = "Task" [ 1149.158870] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.159103] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7cf4a633-7ab3-43e4-a681-f363fed816a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.172676] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926283, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.195292] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1149.195779] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1149.196111] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Deleting the datastore file [datastore1] e53a4875-77e6-4a13-9a4e-004fe8014a85 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1149.196586] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b01858e9-2b0c-438e-b375-40a9ae85c058 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.205701] env[69994]: DEBUG oslo_vmware.api [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Waiting for the task: (returnval){ [ 1149.205701] env[69994]: value = "task-2926285" [ 1149.205701] env[69994]: _type = "Task" [ 1149.205701] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.217529] env[69994]: DEBUG oslo_vmware.api [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926285, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.244743] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.245365] env[69994]: DEBUG nova.compute.manager [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1149.248464] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.976s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.248691] env[69994]: DEBUG nova.objects.instance [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lazy-loading 'resources' on Instance uuid f98078e1-ee72-4bdb-aebf-405ffbb7900d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1149.331812] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5254fd6f-c9c6-6188-46eb-50b83affb3ca, 'name': SearchDatastore_Task, 'duration_secs': 0.013954} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.332186] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.332394] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 6b29cefb-8f86-4826-a1c9-873fd48c53a7/6b29cefb-8f86-4826-a1c9-873fd48c53a7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1149.332687] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c661815-f014-4806-a78c-81b448fefd38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.340272] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1149.340272] env[69994]: value = "task-2926286" [ 1149.340272] env[69994]: _type = "Task" [ 1149.340272] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.342564] env[69994]: DEBUG nova.compute.manager [req-9a79de54-4422-4f7f-bc8d-03819062ae1f req-5d483eee-3727-46b3-99fa-51d31679bc28 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Received event network-vif-plugged-7608b3ed-dbf1-48c0-a088-071f08980220 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1149.342778] env[69994]: DEBUG oslo_concurrency.lockutils [req-9a79de54-4422-4f7f-bc8d-03819062ae1f req-5d483eee-3727-46b3-99fa-51d31679bc28 service nova] Acquiring lock "395a4d39-29ae-4443-949f-4737e7e2341e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.343043] env[69994]: DEBUG oslo_concurrency.lockutils [req-9a79de54-4422-4f7f-bc8d-03819062ae1f req-5d483eee-3727-46b3-99fa-51d31679bc28 service nova] Lock "395a4d39-29ae-4443-949f-4737e7e2341e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.343229] env[69994]: DEBUG oslo_concurrency.lockutils [req-9a79de54-4422-4f7f-bc8d-03819062ae1f req-5d483eee-3727-46b3-99fa-51d31679bc28 service nova] Lock "395a4d39-29ae-4443-949f-4737e7e2341e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.343406] env[69994]: DEBUG nova.compute.manager [req-9a79de54-4422-4f7f-bc8d-03819062ae1f req-5d483eee-3727-46b3-99fa-51d31679bc28 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] No waiting events found dispatching network-vif-plugged-7608b3ed-dbf1-48c0-a088-071f08980220 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1149.343688] env[69994]: WARNING nova.compute.manager [req-9a79de54-4422-4f7f-bc8d-03819062ae1f req-5d483eee-3727-46b3-99fa-51d31679bc28 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Received unexpected event network-vif-plugged-7608b3ed-dbf1-48c0-a088-071f08980220 for instance with vm_state building and task_state spawning. [ 1149.353201] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926286, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.417010] env[69994]: DEBUG oslo_vmware.api [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926282, 'name': ReconfigVM_Task, 'duration_secs': 0.372952} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.417432] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Reconfigured VM instance instance-00000062 to attach disk [datastore1] volume-15ece227-346b-4a76-a2cf-05eb2b20d7b0/volume-15ece227-346b-4a76-a2cf-05eb2b20d7b0.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1149.423470] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98a42285-2e25-4524-b544-7aabbeeb7b13 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.434148] env[69994]: DEBUG nova.network.neutron [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Successfully updated port: 7608b3ed-dbf1-48c0-a088-071f08980220 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1149.435567] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1149.436024] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aeb66d4b-299b-4d57-af48-de4ff4fadc8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.444859] env[69994]: DEBUG oslo_vmware.api [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1149.444859] env[69994]: value = "task-2926288" [ 1149.444859] env[69994]: _type = "Task" [ 1149.444859] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.451586] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1149.451586] env[69994]: value = "task-2926287" [ 1149.451586] env[69994]: _type = "Task" [ 1149.451586] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.458755] env[69994]: DEBUG oslo_vmware.api [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926288, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.465559] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1149.465778] env[69994]: DEBUG nova.compute.manager [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1149.466569] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23f01d8-e255-4f84-985b-7a7230e116dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.475598] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.475826] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.476032] env[69994]: DEBUG nova.network.neutron [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1149.672303] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926283, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077438} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.672753] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1149.673727] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc4bc9c-7390-426a-9e2a-9972ffa31893 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.701234] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 826489f7-081d-4a3e-8a05-62d902849a61/826489f7-081d-4a3e-8a05-62d902849a61.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1149.702077] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbcc274d-e2ab-424c-b76d-0d7cfc4e436d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.730615] env[69994]: DEBUG oslo_vmware.api [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Task: {'id': task-2926285, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105405} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.732781] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1149.732781] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1149.732938] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1149.733267] env[69994]: INFO nova.compute.manager [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Took 1.12 seconds to destroy the instance on the hypervisor. 
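The entries above all follow the same invoke-then-poll pattern: an asynchronous vSphere task (ReconfigVM_Task, ExtendVirtualDisk_Task, DeleteDatastoreFile_Task, ...) is started through the oslo.vmware session and then polled by wait_for_task, which emits the "progress is N%" and "completed successfully" lines seen here. The following is a minimal, hedged sketch of that pattern using the public oslo.vmware API; the vCenter host, credentials and the managed-object ID are illustrative placeholders, not values taken from this log, and the empty config spec stands in for the device changes Nova would normally populate.

# Sketch of the invoke-then-poll pattern visible in the log records above.
# Host, credentials and the 'vm-587634'-style moref value are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.org',            # placeholder vCenter host
    'administrator@vsphere.local',    # placeholder user
    'secret',                         # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)           # seconds between _poll_task iterations

# Look up a VM by its vSphere managed-object ID (illustrative value).
vm_ref = vim_util.get_moref('vm-000000', 'VirtualMachine')

# Start an asynchronous vSphere task; a real attach would fill the spec
# with VirtualDeviceConfigSpec entries, omitted here for brevity.
client_factory = session.vim.client.factory
config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
task_ref = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=config_spec)

# Block until the task finishes; oslo.vmware's internal poll loop produces
# the per-task progress log lines while this call waits.
task_info = session.wait_for_task(task_ref)
print(task_info.state)

Under this reading, each "Waiting for the task: ... task-2926xxx" record marks the invoke_api call returning a task reference, and the later "duration_secs" record marks wait_for_task observing the terminal state.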
[ 1149.733464] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1149.733896] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1149.733896] env[69994]: value = "task-2926289" [ 1149.733896] env[69994]: _type = "Task" [ 1149.733896] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.734041] env[69994]: DEBUG nova.compute.manager [-] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1149.734106] env[69994]: DEBUG nova.network.neutron [-] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1149.746628] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926289, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.751392] env[69994]: DEBUG nova.compute.utils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1149.756678] env[69994]: DEBUG nova.network.neutron [-] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1149.759058] env[69994]: DEBUG nova.compute.manager [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1149.759559] env[69994]: DEBUG nova.network.neutron [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1149.812847] env[69994]: DEBUG nova.policy [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8a8bcbbe1454049982f693dbfa19790', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c545eb835008401ab8672be30dbcdad9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1149.855081] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926286, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476403} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.855370] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 6b29cefb-8f86-4826-a1c9-873fd48c53a7/6b29cefb-8f86-4826-a1c9-873fd48c53a7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1149.855661] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1149.855940] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-875f9057-ec55-4d57-adf5-f8bf1f5e7486 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.864685] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1149.864685] env[69994]: value = "task-2926290" [ 1149.864685] env[69994]: _type = "Task" [ 1149.864685] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.875999] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926290, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.937156] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.937318] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.937469] env[69994]: DEBUG nova.network.neutron [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1149.958911] env[69994]: DEBUG oslo_vmware.api [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926288, 'name': ReconfigVM_Task, 'duration_secs': 0.182096} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.961763] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587634', 'volume_id': '15ece227-346b-4a76-a2cf-05eb2b20d7b0', 'name': 'volume-15ece227-346b-4a76-a2cf-05eb2b20d7b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'dd196e59-868b-409f-bddb-bb99b0c1092f', 'attached_at': '', 'detached_at': '', 'volume_id': '15ece227-346b-4a76-a2cf-05eb2b20d7b0', 'serial': '15ece227-346b-4a76-a2cf-05eb2b20d7b0'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1149.974988] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0fc45e-bb62-46b4-9784-902146b796eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.987661] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcd2799-14a3-48b6-ba92-b2a277773d9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.021390] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d128fceb-2aff-4c46-b6ed-198612534040 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.030068] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8c4b48-1679-451d-94c6-7f4204bdf1dc {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.045272] env[69994]: DEBUG nova.compute.provider_tree [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1150.054405] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.054720] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.054988] env[69994]: INFO nova.compute.manager [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Attaching volume e95e0bf4-fc79-440d-84e6-8467a47b5cc4 to /dev/sdb [ 1150.102534] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b1b89b-513a-4ad4-9777-fca50e9bd374 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.109610] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36596c4f-e31a-48ad-b130-92a89096534e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.126456] env[69994]: DEBUG nova.virt.block_device [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Updating existing volume attachment record: c10266a6-cec2-4ba4-9651-d92c2bf1b188 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1150.147920] env[69994]: DEBUG nova.network.neutron [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Successfully created port: b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1150.250021] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926289, 'name': ReconfigVM_Task, 'duration_secs': 0.411112} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.250021] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 826489f7-081d-4a3e-8a05-62d902849a61/826489f7-081d-4a3e-8a05-62d902849a61.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1150.250021] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b33bb984-b187-4127-b712-b36f38b48f5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.257379] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1150.257379] env[69994]: value = "task-2926291" [ 1150.257379] env[69994]: _type = "Task" [ 1150.257379] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.261490] env[69994]: DEBUG nova.compute.manager [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1150.266187] env[69994]: DEBUG nova.network.neutron [-] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.270707] env[69994]: DEBUG nova.network.neutron [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updating instance_info_cache with network_info: [{"id": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "address": "fa:16:3e:dd:e8:98", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda352ba6-e5", "ovs_interfaceid": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.277666] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926291, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.379238] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926290, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073755} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.379664] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1150.380629] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25793e03-9b43-40a9-8f0f-a7fbf576c0ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.406127] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 6b29cefb-8f86-4826-a1c9-873fd48c53a7/6b29cefb-8f86-4826-a1c9-873fd48c53a7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1150.406127] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4be75c20-d57d-415a-bf8d-3f0f13c26d4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.426693] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1150.426693] env[69994]: value = "task-2926293" [ 1150.426693] env[69994]: _type = "Task" [ 1150.426693] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.435160] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926293, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.471528] env[69994]: DEBUG nova.network.neutron [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1150.497873] env[69994]: DEBUG oslo_vmware.rw_handles [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52374769-274b-3020-431f-02558b8080bb/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1150.498932] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4e7e3b-5112-415d-89d9-a2f304ac7d38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.506975] env[69994]: DEBUG oslo_vmware.rw_handles [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52374769-274b-3020-431f-02558b8080bb/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1150.507161] env[69994]: ERROR oslo_vmware.rw_handles [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52374769-274b-3020-431f-02558b8080bb/disk-0.vmdk due to incomplete transfer. [ 1150.507392] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-aaff2ca9-5816-43c4-99cc-95393755db80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.516219] env[69994]: DEBUG oslo_vmware.rw_handles [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52374769-274b-3020-431f-02558b8080bb/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1150.516430] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Uploaded image 757c5b7a-e993-4b3d-811b-7c0824b6c981 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1150.518252] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1150.518776] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-78b31453-1846-474b-8ecb-7df044997ea8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.525177] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1150.525177] env[69994]: value = "task-2926296" [ 1150.525177] env[69994]: _type = "Task" [ 1150.525177] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.533466] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926296, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.548922] env[69994]: DEBUG nova.scheduler.client.report [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1150.605046] env[69994]: DEBUG nova.network.neutron [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updating instance_info_cache with network_info: [{"id": "7608b3ed-dbf1-48c0-a088-071f08980220", "address": "fa:16:3e:78:4f:1d", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7608b3ed-db", "ovs_interfaceid": "7608b3ed-dbf1-48c0-a088-071f08980220", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.768562] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926291, 'name': Rename_Task, 'duration_secs': 0.236232} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.768874] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1150.769150] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a54dec14-e8cf-4b50-9bf0-68e9822b36f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.777530] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1150.777530] env[69994]: value = "task-2926297" [ 1150.777530] env[69994]: _type = "Task" [ 1150.777530] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.781568] env[69994]: INFO nova.compute.manager [-] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Took 1.05 seconds to deallocate network for instance. [ 1150.785524] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.798545] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926297, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.937618] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926293, 'name': ReconfigVM_Task, 'duration_secs': 0.427763} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.937829] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 6b29cefb-8f86-4826-a1c9-873fd48c53a7/6b29cefb-8f86-4826-a1c9-873fd48c53a7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1150.938544] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c10a7aa-ca5c-4b6f-bcd1-907814cabdde {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.945566] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1150.945566] env[69994]: value = "task-2926298" [ 1150.945566] env[69994]: _type = "Task" [ 1150.945566] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.955726] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926298, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.005035] env[69994]: DEBUG nova.objects.instance [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lazy-loading 'flavor' on Instance uuid dd196e59-868b-409f-bddb-bb99b0c1092f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1151.037470] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926296, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.054191] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.806s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.081392] env[69994]: DEBUG nova.compute.manager [req-b6836fc8-9bc8-4ebe-b810-50733c039974 req-e61ff157-c0a4-4ecf-ae83-cd5e0b6aaafe service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Received event network-vif-unplugged-da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1151.081392] env[69994]: DEBUG oslo_concurrency.lockutils [req-b6836fc8-9bc8-4ebe-b810-50733c039974 req-e61ff157-c0a4-4ecf-ae83-cd5e0b6aaafe service nova] Acquiring lock "3c374550-d65b-494a-89d7-60720f6b44dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.081779] env[69994]: DEBUG oslo_concurrency.lockutils [req-b6836fc8-9bc8-4ebe-b810-50733c039974 req-e61ff157-c0a4-4ecf-ae83-cd5e0b6aaafe service nova] Lock "3c374550-d65b-494a-89d7-60720f6b44dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.082094] env[69994]: DEBUG oslo_concurrency.lockutils [req-b6836fc8-9bc8-4ebe-b810-50733c039974 req-e61ff157-c0a4-4ecf-ae83-cd5e0b6aaafe service nova] Lock "3c374550-d65b-494a-89d7-60720f6b44dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.082433] env[69994]: DEBUG nova.compute.manager [req-b6836fc8-9bc8-4ebe-b810-50733c039974 req-e61ff157-c0a4-4ecf-ae83-cd5e0b6aaafe service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] No waiting events found dispatching network-vif-unplugged-da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1151.082675] env[69994]: WARNING nova.compute.manager [req-b6836fc8-9bc8-4ebe-b810-50733c039974 req-e61ff157-c0a4-4ecf-ae83-cd5e0b6aaafe service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Received unexpected event network-vif-unplugged-da352ba6-e52b-4b13-8514-5db1e4d826ee for instance with vm_state shelved and task_state shelving_offloading. 
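The "Acquiring lock ... / acquired ... waited N.NNNs / released ... held N.NNNs" triplets threaded through these records come from oslo.concurrency's lockutils wrapper around critical sections such as the per-instance event handling and the resource-tracker bookkeeping. Below is a rough illustration of how such a guarded section is typically written; the lock names and function bodies are made-up examples, not code lifted from Nova.

# Illustration of the lockutils pattern behind the acquire/release log lines.
# Lock names and the guarded bodies are hypothetical.
from oslo_concurrency import lockutils

@lockutils.synchronized('example-instance-events')
def pop_instance_event_example(events, key):
    # Runs with the named lock held; lockutils' wrapper logs the
    # acquire/release pair and the hold time around this body.
    return events.pop(key, None)

# The same guard expressed as a context manager instead of a decorator:
with lockutils.lock('compute_resources'):
    pass  # e.g. resource-claim or usage-update bookkeeping would run here

The "waited" figure in the log is the time spent blocked before entering the section, and "held" is how long the body ran, which is why long-held locks such as the 2.362s compute_resources claim above show up prominently.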
[ 1151.083984] env[69994]: INFO nova.scheduler.client.report [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted allocations for instance f98078e1-ee72-4bdb-aebf-405ffbb7900d [ 1151.107607] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.107949] env[69994]: DEBUG nova.compute.manager [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Instance network_info: |[{"id": "7608b3ed-dbf1-48c0-a088-071f08980220", "address": "fa:16:3e:78:4f:1d", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7608b3ed-db", "ovs_interfaceid": "7608b3ed-dbf1-48c0-a088-071f08980220", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1151.108587] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:4f:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7608b3ed-dbf1-48c0-a088-071f08980220', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1151.118156] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1151.118414] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1151.118642] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ce96fe5-4c0f-4658-8cba-05ad51eb2bac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.142154] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1151.142154] env[69994]: value = "task-2926299" [ 1151.142154] env[69994]: _type = "Task" [ 1151.142154] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.150485] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926299, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.188900] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1151.189140] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef8fae0-e10c-4728-9462-4138896af7dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.199020] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1151.199020] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3dc2d84-6332-4828-8424-0a1947f77220 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.282203] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1151.282453] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1151.282637] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleting the datastore file [datastore2] 3c374550-d65b-494a-89d7-60720f6b44dc {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1151.283812] 
env[69994]: DEBUG nova.compute.manager [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1151.286131] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a6768aa5-e12d-4370-ac24-3708082e6d58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.292050] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926297, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.293877] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.294191] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.294397] env[69994]: DEBUG nova.objects.instance [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Lazy-loading 'resources' on Instance uuid e53a4875-77e6-4a13-9a4e-004fe8014a85 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1151.295490] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1151.295490] env[69994]: value = "task-2926301" [ 1151.295490] env[69994]: _type = "Task" [ 1151.295490] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.304367] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926301, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.318382] env[69994]: DEBUG nova.virt.hardware [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1151.318684] env[69994]: DEBUG nova.virt.hardware [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1151.318862] env[69994]: DEBUG nova.virt.hardware [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1151.319067] env[69994]: DEBUG nova.virt.hardware [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1151.319313] env[69994]: DEBUG nova.virt.hardware [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1151.319482] env[69994]: DEBUG nova.virt.hardware [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1151.319765] env[69994]: DEBUG nova.virt.hardware [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1151.319956] env[69994]: DEBUG nova.virt.hardware [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1151.320170] 
env[69994]: DEBUG nova.virt.hardware [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1151.320356] env[69994]: DEBUG nova.virt.hardware [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1151.320562] env[69994]: DEBUG nova.virt.hardware [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1151.322336] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e761609-d2c8-4c68-addc-a32d9aefdab8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.331682] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3c3615-4359-4132-bb81-42cf0acc37dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.371887] env[69994]: DEBUG nova.compute.manager [req-43f4c843-94e6-46f3-8701-216c1adc490d req-b4905cbd-5bd7-420f-89f5-95a54e5056f7 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Received event network-changed-7608b3ed-dbf1-48c0-a088-071f08980220 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1151.372106] env[69994]: DEBUG nova.compute.manager [req-43f4c843-94e6-46f3-8701-216c1adc490d req-b4905cbd-5bd7-420f-89f5-95a54e5056f7 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Refreshing instance network info cache due to event network-changed-7608b3ed-dbf1-48c0-a088-071f08980220. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1151.372344] env[69994]: DEBUG oslo_concurrency.lockutils [req-43f4c843-94e6-46f3-8701-216c1adc490d req-b4905cbd-5bd7-420f-89f5-95a54e5056f7 service nova] Acquiring lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.372493] env[69994]: DEBUG oslo_concurrency.lockutils [req-43f4c843-94e6-46f3-8701-216c1adc490d req-b4905cbd-5bd7-420f-89f5-95a54e5056f7 service nova] Acquired lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.372652] env[69994]: DEBUG nova.network.neutron [req-43f4c843-94e6-46f3-8701-216c1adc490d req-b4905cbd-5bd7-420f-89f5-95a54e5056f7 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Refreshing network info cache for port 7608b3ed-dbf1-48c0-a088-071f08980220 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.460620] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926298, 'name': Rename_Task, 'duration_secs': 0.152019} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.461031] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1151.461310] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a3da0cb-6bd0-4432-9ac1-aad4fdd193d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.469405] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1151.469405] env[69994]: value = "task-2926302" [ 1151.469405] env[69994]: _type = "Task" [ 1151.469405] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.482882] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926302, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.517399] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3bc056f0-a1c7-4430-b7d9-4430b0fde86e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.290s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.538376] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926296, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.600534] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ca899140-1ddf-455a-b154-8d12c51c98cb tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "f98078e1-ee72-4bdb-aebf-405ffbb7900d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.327s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.655677] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926299, 'name': CreateVM_Task, 'duration_secs': 0.392517} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.655963] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1151.656916] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.657819] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.658275] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1151.658700] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d085a99-587e-4c79-a486-d3bcadb72b00 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.665691] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 
tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1151.665691] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e34630-d372-b569-5ff8-a8ff98b31141" [ 1151.665691] env[69994]: _type = "Task" [ 1151.665691] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.675441] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e34630-d372-b569-5ff8-a8ff98b31141, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.713872] env[69994]: DEBUG nova.network.neutron [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Successfully updated port: b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1151.790170] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926297, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.809215] env[69994]: DEBUG oslo_vmware.api [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215158} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.809501] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1151.809701] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1151.809893] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1151.812373] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "dd196e59-868b-409f-bddb-bb99b0c1092f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.812604] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.832225] env[69994]: INFO nova.scheduler.client.report [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleted allocations for instance 3c374550-d65b-494a-89d7-60720f6b44dc [ 1151.984979] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926302, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.022984] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f403620-2975-4f0e-bdb7-1a4cacd4050f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.035605] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb724c4c-c30f-4426-a745-9175df6c010f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.042722] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926296, 'name': Destroy_Task, 'duration_secs': 1.321205} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.042888] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Destroyed the VM [ 1152.043021] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1152.043313] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6517168f-98bd-4f61-bbc9-9ee119a248d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.070581] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989abaf9-656a-45dc-8e2c-a40794023c59 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.073309] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1152.073309] env[69994]: value = "task-2926303" [ 1152.073309] env[69994]: _type = "Task" [ 1152.073309] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.082765] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200d0b75-da7f-4795-8706-0bbff5a0a281 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.088966] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926303, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.102803] env[69994]: DEBUG nova.compute.provider_tree [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.178782] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e34630-d372-b569-5ff8-a8ff98b31141, 'name': SearchDatastore_Task, 'duration_secs': 0.010961} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.179995] env[69994]: DEBUG nova.network.neutron [req-43f4c843-94e6-46f3-8701-216c1adc490d req-b4905cbd-5bd7-420f-89f5-95a54e5056f7 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updated VIF entry in instance network info cache for port 7608b3ed-dbf1-48c0-a088-071f08980220. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1152.180467] env[69994]: DEBUG nova.network.neutron [req-43f4c843-94e6-46f3-8701-216c1adc490d req-b4905cbd-5bd7-420f-89f5-95a54e5056f7 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updating instance_info_cache with network_info: [{"id": "7608b3ed-dbf1-48c0-a088-071f08980220", "address": "fa:16:3e:78:4f:1d", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7608b3ed-db", "ovs_interfaceid": "7608b3ed-dbf1-48c0-a088-071f08980220", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.181864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.182315] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Processing image 
f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1152.182655] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.182988] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1152.183800] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1152.184437] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4976501f-3501-4aab-86fa-6670de351c53 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.196591] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1152.196800] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1152.198166] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d218572-593d-4632-bde9-f50cde510946 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.205068] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1152.205068] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52fd6500-0475-d844-01ec-1951526ae48c" [ 1152.205068] env[69994]: _type = "Task" [ 1152.205068] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.214072] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52fd6500-0475-d844-01ec-1951526ae48c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.216682] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.216814] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1152.216967] env[69994]: DEBUG nova.network.neutron [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1152.288543] env[69994]: DEBUG oslo_vmware.api [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926297, 'name': PowerOnVM_Task, 'duration_secs': 1.172541} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.288844] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1152.289059] env[69994]: INFO nova.compute.manager [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Took 10.00 seconds to spawn the instance on the hypervisor. 
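The "Invoking VirtualMachine.PowerOnVM_Task ...", "Task: {...} progress is N%" and "completed successfully" records above come from oslo.vmware: a SOAP call returns a task managed object, and the session then polls that task until it finishes. A rough sketch of the call pattern follows (the endpoint, credentials and vm_ref are placeholders, not values taken from this log):

    from oslo_vmware import api

    def power_on_and_wait(session, vm_ref):
        # invoke_api issues the SOAP request (the "Invoking ..." records);
        # wait_for_task polls the returned task, logging "progress is N%"
        # until it completes successfully or raises on error.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)

    # Placeholder connection details; in Nova these come from the [vmware]
    # section of nova.conf.
    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)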
[ 1152.289246] env[69994]: DEBUG nova.compute.manager [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1152.290046] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d313dea-207a-4d8f-9c60-9468b69131f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.317904] env[69994]: INFO nova.compute.manager [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Detaching volume 15ece227-346b-4a76-a2cf-05eb2b20d7b0 [ 1152.336431] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.355110] env[69994]: INFO nova.virt.block_device [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Attempting to driver detach volume 15ece227-346b-4a76-a2cf-05eb2b20d7b0 from mountpoint /dev/sdb [ 1152.355373] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1152.355568] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587634', 'volume_id': '15ece227-346b-4a76-a2cf-05eb2b20d7b0', 'name': 'volume-15ece227-346b-4a76-a2cf-05eb2b20d7b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'dd196e59-868b-409f-bddb-bb99b0c1092f', 'attached_at': '', 'detached_at': '', 'volume_id': '15ece227-346b-4a76-a2cf-05eb2b20d7b0', 'serial': '15ece227-346b-4a76-a2cf-05eb2b20d7b0'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1152.356739] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d5ea70-c8ba-43cc-ab3a-3a9edf4ed143 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.379994] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be3151d7-a437-4328-986a-40420d16850c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.388212] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cced77a-2dd1-427f-968c-2b15e8a9270e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.417916] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d998ead6-38fd-4dc5-b3e7-6b61f06115e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.434767] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] The volume has not been displaced from its original location: [datastore1] volume-15ece227-346b-4a76-a2cf-05eb2b20d7b0/volume-15ece227-346b-4a76-a2cf-05eb2b20d7b0.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1152.440090] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1152.440424] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76b97215-3946-4d2c-9fb6-024b5f5d946c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.460470] env[69994]: DEBUG oslo_vmware.api [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1152.460470] env[69994]: value = "task-2926304" [ 1152.460470] env[69994]: _type = "Task" [ 1152.460470] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.469234] env[69994]: DEBUG oslo_vmware.api [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926304, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.480134] env[69994]: DEBUG oslo_vmware.api [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926302, 'name': PowerOnVM_Task, 'duration_secs': 0.55678} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.480409] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1152.480665] env[69994]: INFO nova.compute.manager [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Took 7.76 seconds to spawn the instance on the hypervisor. [ 1152.480929] env[69994]: DEBUG nova.compute.manager [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1152.482144] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c18eab-0c88-4dcf-846f-686c570dc268 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.583857] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926303, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.604472] env[69994]: DEBUG nova.scheduler.client.report [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1152.686340] env[69994]: DEBUG oslo_concurrency.lockutils [req-43f4c843-94e6-46f3-8701-216c1adc490d req-b4905cbd-5bd7-420f-89f5-95a54e5056f7 service nova] Releasing lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.716443] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52fd6500-0475-d844-01ec-1951526ae48c, 'name': SearchDatastore_Task, 'duration_secs': 0.009682} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.716997] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15ce8e71-291f-4ca8-9f9c-4f1458f0bc0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.724774] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1152.724774] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522d0912-4579-214c-13d6-6c40cde6712f" [ 1152.724774] env[69994]: _type = "Task" [ 1152.724774] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.733000] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522d0912-4579-214c-13d6-6c40cde6712f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.752114] env[69994]: DEBUG nova.network.neutron [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1152.807525] env[69994]: INFO nova.compute.manager [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Took 16.47 seconds to build instance. [ 1152.840691] env[69994]: DEBUG oslo_concurrency.lockutils [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "06fa5ab5-baab-466e-8574-5391247c13a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.840691] env[69994]: DEBUG oslo_concurrency.lockutils [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "06fa5ab5-baab-466e-8574-5391247c13a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.840691] env[69994]: DEBUG oslo_concurrency.lockutils [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "06fa5ab5-baab-466e-8574-5391247c13a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.840958] env[69994]: DEBUG oslo_concurrency.lockutils [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "06fa5ab5-baab-466e-8574-5391247c13a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.841198] env[69994]: DEBUG oslo_concurrency.lockutils [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "06fa5ab5-baab-466e-8574-5391247c13a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.843285] env[69994]: INFO nova.compute.manager [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Terminating instance [ 1152.879352] env[69994]: DEBUG nova.network.neutron [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance_info_cache with network_info: [{"id": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "address": "fa:16:3e:d5:49:79", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb02b823a-ee", "ovs_interfaceid": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.971599] env[69994]: DEBUG oslo_vmware.api [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926304, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.005858] env[69994]: INFO nova.compute.manager [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Took 15.85 seconds to build instance. [ 1153.083929] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926303, 'name': RemoveSnapshot_Task, 'duration_secs': 0.950094} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.084257] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1153.084558] env[69994]: DEBUG nova.compute.manager [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1153.085346] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac434704-5e98-4a5b-b025-dcb0f44de095 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.109243] env[69994]: DEBUG nova.compute.manager [req-3f73b255-10fb-470f-9ead-768a9870c253 req-cf3acb4e-a52a-4510-91c1-d5e04a8bb447 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Received event network-changed-da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1153.109243] env[69994]: DEBUG nova.compute.manager [req-3f73b255-10fb-470f-9ead-768a9870c253 req-cf3acb4e-a52a-4510-91c1-d5e04a8bb447 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Refreshing instance network info cache due to event network-changed-da352ba6-e52b-4b13-8514-5db1e4d826ee. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1153.109243] env[69994]: DEBUG oslo_concurrency.lockutils [req-3f73b255-10fb-470f-9ead-768a9870c253 req-cf3acb4e-a52a-4510-91c1-d5e04a8bb447 service nova] Acquiring lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.109243] env[69994]: DEBUG oslo_concurrency.lockutils [req-3f73b255-10fb-470f-9ead-768a9870c253 req-cf3acb4e-a52a-4510-91c1-d5e04a8bb447 service nova] Acquired lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.109243] env[69994]: DEBUG nova.network.neutron [req-3f73b255-10fb-470f-9ead-768a9870c253 req-cf3acb4e-a52a-4510-91c1-d5e04a8bb447 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Refreshing network info cache for port da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1153.111664] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.817s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.115120] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.779s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.115177] env[69994]: DEBUG nova.objects.instance [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lazy-loading 'resources' on Instance uuid 3c374550-d65b-494a-89d7-60720f6b44dc {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.135636] env[69994]: INFO nova.scheduler.client.report [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Deleted allocations for instance e53a4875-77e6-4a13-9a4e-004fe8014a85 [ 1153.236758] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522d0912-4579-214c-13d6-6c40cde6712f, 'name': SearchDatastore_Task, 'duration_secs': 0.010091} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.237176] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.237531] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 395a4d39-29ae-4443-949f-4737e7e2341e/395a4d39-29ae-4443-949f-4737e7e2341e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1153.237809] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4d85188-26d5-4792-b552-0b36e0d5a0fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.246352] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1153.246352] env[69994]: value = "task-2926305" [ 1153.246352] env[69994]: _type = "Task" [ 1153.246352] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.255496] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926305, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.310335] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e208c8ff-d81f-4dc5-85b0-90ff49591abe tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "826489f7-081d-4a3e-8a05-62d902849a61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.977s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.346887] env[69994]: DEBUG nova.compute.manager [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1153.347279] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1153.348230] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d938ac-1db7-4e75-9ad4-5110675950d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.358104] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1153.358426] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5eabdb16-65f3-4168-9b56-c84fedaac402 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.366815] env[69994]: DEBUG oslo_vmware.api [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1153.366815] env[69994]: value = "task-2926306" [ 1153.366815] env[69994]: _type = "Task" [ 1153.366815] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.377414] env[69994]: DEBUG oslo_vmware.api [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926306, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.381924] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.382364] env[69994]: DEBUG nova.compute.manager [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Instance network_info: |[{"id": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "address": "fa:16:3e:d5:49:79", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb02b823a-ee", "ovs_interfaceid": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1153.382691] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:49:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1153.391290] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1153.391893] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1153.391893] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba4da0f6-6c85-406b-ba02-b807904a7116 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.417125] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1153.417125] env[69994]: value = "task-2926307" [ 1153.417125] env[69994]: _type = "Task" [ 1153.417125] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.427377] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926307, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.446404] env[69994]: DEBUG nova.compute.manager [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Received event network-vif-plugged-b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1153.446682] env[69994]: DEBUG oslo_concurrency.lockutils [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] Acquiring lock "a828caf9-2b61-4449-b1ee-25f0828380d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.446949] env[69994]: DEBUG oslo_concurrency.lockutils [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.447181] env[69994]: DEBUG oslo_concurrency.lockutils [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.447364] env[69994]: DEBUG nova.compute.manager [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] No waiting events found dispatching network-vif-plugged-b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1153.447534] env[69994]: WARNING nova.compute.manager [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Received unexpected event network-vif-plugged-b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf for instance with vm_state building and task_state spawning. 
[ 1153.447697] env[69994]: DEBUG nova.compute.manager [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Received event network-changed-b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1153.447858] env[69994]: DEBUG nova.compute.manager [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Refreshing instance network info cache due to event network-changed-b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1153.448098] env[69994]: DEBUG oslo_concurrency.lockutils [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] Acquiring lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.448246] env[69994]: DEBUG oslo_concurrency.lockutils [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] Acquired lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.448418] env[69994]: DEBUG nova.network.neutron [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Refreshing network info cache for port b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1153.473038] env[69994]: DEBUG oslo_vmware.api [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926304, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.507812] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0284c008-6af6-48a2-94be-b685a08bb9e5 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "6b29cefb-8f86-4826-a1c9-873fd48c53a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.388s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.604566] env[69994]: INFO nova.compute.manager [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Shelve offloading [ 1153.620102] env[69994]: DEBUG nova.objects.instance [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lazy-loading 'numa_topology' on Instance uuid 3c374550-d65b-494a-89d7-60720f6b44dc {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.646711] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9764b087-657a-425b-a014-9493bcf54b4e tempest-ServersAaction247Test-502340419 tempest-ServersAaction247Test-502340419-project-member] Lock "e53a4875-77e6-4a13-9a4e-004fe8014a85" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.682s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.759550] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926305, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456299} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.759550] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 395a4d39-29ae-4443-949f-4737e7e2341e/395a4d39-29ae-4443-949f-4737e7e2341e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1153.759550] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1153.759550] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c115d93e-b79f-4fc2-8b6e-632d81a46a58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.770325] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1153.770325] env[69994]: value = "task-2926308" [ 1153.770325] env[69994]: _type = "Task" [ 1153.770325] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.783333] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926308, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.880647] env[69994]: DEBUG oslo_vmware.api [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926306, 'name': PowerOffVM_Task, 'duration_secs': 0.435807} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.880980] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1153.881246] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1153.881647] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e5efb1d-2c49-447b-a6df-744746643af4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.928744] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926307, 'name': CreateVM_Task, 'duration_secs': 0.489075} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.931858] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1153.932773] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.932964] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.933381] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1153.933882] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51c98f22-7269-477a-a841-b526498015e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.940861] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1153.940861] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ea6e3e-8f78-a604-e2bb-6c64edc6d5e7" [ 1153.940861] env[69994]: _type = "Task" [ 1153.940861] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.954141] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ea6e3e-8f78-a604-e2bb-6c64edc6d5e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.956138] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1153.956388] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1153.956500] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleting the datastore file [datastore1] 06fa5ab5-baab-466e-8574-5391247c13a8 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1153.957259] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8204adc9-c650-4519-9b14-7175264cae6d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.963716] env[69994]: DEBUG oslo_vmware.api [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for the task: (returnval){ [ 1153.963716] env[69994]: value = "task-2926310" [ 1153.963716] env[69994]: _type = "Task" [ 1153.963716] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.975435] env[69994]: DEBUG oslo_vmware.api [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926304, 'name': ReconfigVM_Task, 'duration_secs': 1.276287} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.980342] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1153.983297] env[69994]: DEBUG oslo_vmware.api [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926310, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.983535] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be3eed42-c2d6-4cdd-8558-5496efdd660b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.994317] env[69994]: DEBUG nova.network.neutron [req-3f73b255-10fb-470f-9ead-768a9870c253 req-cf3acb4e-a52a-4510-91c1-d5e04a8bb447 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updated VIF entry in instance network info cache for port da352ba6-e52b-4b13-8514-5db1e4d826ee. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1153.994666] env[69994]: DEBUG nova.network.neutron [req-3f73b255-10fb-470f-9ead-768a9870c253 req-cf3acb4e-a52a-4510-91c1-d5e04a8bb447 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updating instance_info_cache with network_info: [{"id": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "address": "fa:16:3e:dd:e8:98", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapda352ba6-e5", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.005842] env[69994]: DEBUG oslo_vmware.api [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1154.005842] env[69994]: value = "task-2926311" [ 1154.005842] env[69994]: _type = "Task" [ 1154.005842] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.016505] env[69994]: DEBUG oslo_vmware.api [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926311, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.096240] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "3c374550-d65b-494a-89d7-60720f6b44dc" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.109266] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1154.109703] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb2be469-4ca8-4e37-8b2c-c599e4a4e3ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.121587] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1154.121587] env[69994]: value = "task-2926312" [ 1154.121587] env[69994]: _type = "Task" [ 1154.121587] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.125572] env[69994]: DEBUG nova.objects.base [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Object Instance<3c374550-d65b-494a-89d7-60720f6b44dc> lazy-loaded attributes: resources,numa_topology {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1154.134537] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1154.134537] env[69994]: DEBUG nova.compute.manager [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1154.136323] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258f3012-3d66-43c4-9265-baa34eeb6c76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.141869] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.142107] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 
tempest-ServersNegativeTestJSON-216427101-project-member] Acquired lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.142299] env[69994]: DEBUG nova.network.neutron [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1154.223342] env[69994]: DEBUG nova.network.neutron [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updated VIF entry in instance network info cache for port b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1154.223696] env[69994]: DEBUG nova.network.neutron [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance_info_cache with network_info: [{"id": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "address": "fa:16:3e:d5:49:79", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb02b823a-ee", "ovs_interfaceid": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.277474] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926308, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069367} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.279947] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1154.280946] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e79aa67-2c54-4771-980a-42bacb517890 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.304173] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 395a4d39-29ae-4443-949f-4737e7e2341e/395a4d39-29ae-4443-949f-4737e7e2341e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1154.306852] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ca4c74c-e00d-4d13-b471-66413a27c138 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.327924] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1154.327924] env[69994]: value = "task-2926313" [ 1154.327924] env[69994]: _type = "Task" [ 1154.327924] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.339112] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926313, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.358654] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94af4446-043a-4f8e-841e-989c3ce7f4e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.366355] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16f771a-cda9-4ac2-b330-8099709c5040 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.399037] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bbd256b-0516-4f11-ae95-58171b2c9672 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.402011] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5113f02f-a1a5-4936-9262-d3e69abb75ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.411847] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48ead24-3f13-4f14-9aa1-791fb1e8eff9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.416470] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b8307802-1f2e-42c5-bfb5-e4f97f7a388a tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Suspending the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1154.416731] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-16e59dd0-3ed8-44bf-8599-a304ef541d94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.432204] env[69994]: DEBUG nova.compute.provider_tree [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.437020] env[69994]: DEBUG oslo_vmware.api [None req-b8307802-1f2e-42c5-bfb5-e4f97f7a388a tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1154.437020] env[69994]: value = "task-2926314" [ 1154.437020] env[69994]: _type = "Task" [ 1154.437020] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.443624] env[69994]: DEBUG oslo_vmware.api [None req-b8307802-1f2e-42c5-bfb5-e4f97f7a388a tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926314, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.454565] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ea6e3e-8f78-a604-e2bb-6c64edc6d5e7, 'name': SearchDatastore_Task, 'duration_secs': 0.015028} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.454920] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.455208] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1154.455520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.455713] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.455911] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1154.456447] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab758d19-a09b-4fed-9cb7-20e07372dfbd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.467499] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1154.467744] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1154.472887] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e105552-96bf-46ee-ab54-5402fb54a43c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.481131] env[69994]: DEBUG oslo_vmware.api [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Task: {'id': task-2926310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147861} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.482788] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1154.483033] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1154.483233] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1154.483415] env[69994]: INFO nova.compute.manager [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1154.483697] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1154.483994] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1154.483994] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52484af4-3cfc-1866-f1b0-52e83fba3dab" [ 1154.483994] env[69994]: _type = "Task" [ 1154.483994] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.484224] env[69994]: DEBUG nova.compute.manager [-] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1154.484319] env[69994]: DEBUG nova.network.neutron [-] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1154.496016] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52484af4-3cfc-1866-f1b0-52e83fba3dab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.497649] env[69994]: DEBUG oslo_concurrency.lockutils [req-3f73b255-10fb-470f-9ead-768a9870c253 req-cf3acb4e-a52a-4510-91c1-d5e04a8bb447 service nova] Releasing lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.517085] env[69994]: DEBUG oslo_vmware.api [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926311, 'name': ReconfigVM_Task, 'duration_secs': 0.232665} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.517405] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587634', 'volume_id': '15ece227-346b-4a76-a2cf-05eb2b20d7b0', 'name': 'volume-15ece227-346b-4a76-a2cf-05eb2b20d7b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'dd196e59-868b-409f-bddb-bb99b0c1092f', 'attached_at': '', 'detached_at': '', 'volume_id': '15ece227-346b-4a76-a2cf-05eb2b20d7b0', 'serial': '15ece227-346b-4a76-a2cf-05eb2b20d7b0'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1154.686524] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "826489f7-081d-4a3e-8a05-62d902849a61" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.686870] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "826489f7-081d-4a3e-8a05-62d902849a61" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.687056] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "826489f7-081d-4a3e-8a05-62d902849a61-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.687299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "826489f7-081d-4a3e-8a05-62d902849a61-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.687493] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "826489f7-081d-4a3e-8a05-62d902849a61-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.689736] env[69994]: INFO nova.compute.manager [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Terminating instance [ 1154.726461] env[69994]: DEBUG oslo_concurrency.lockutils [req-2c910cce-22be-4ca0-8916-49728eff301d req-b1322ac4-aaca-4f4e-b61b-ecfc6f56d33a service nova] Releasing lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.840313] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926313, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.919676] env[69994]: DEBUG nova.network.neutron [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Updating instance_info_cache with network_info: [{"id": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "address": "fa:16:3e:96:f0:2c", "network": {"id": "ca055ef0-8a45-4457-a25c-226ccd592aa9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472062423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0af2d3f09d264d4c9bba8747f74383bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcfa2bab-5c", "ovs_interfaceid": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.936101] env[69994]: DEBUG nova.scheduler.client.report [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1154.948912] env[69994]: DEBUG oslo_vmware.api [None req-b8307802-1f2e-42c5-bfb5-e4f97f7a388a tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926314, 'name': SuspendVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.996378] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52484af4-3cfc-1866-f1b0-52e83fba3dab, 'name': SearchDatastore_Task, 'duration_secs': 0.046572} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.997148] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d01a14d-694e-4170-95eb-d6fc86c3dc29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.004512] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1155.004512] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520fdb87-5c51-7e67-4f90-4d4f7559e80b" [ 1155.004512] env[69994]: _type = "Task" [ 1155.004512] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.015281] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520fdb87-5c51-7e67-4f90-4d4f7559e80b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.064347] env[69994]: DEBUG nova.objects.instance [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lazy-loading 'flavor' on Instance uuid dd196e59-868b-409f-bddb-bb99b0c1092f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1155.133891] env[69994]: DEBUG nova.compute.manager [req-d7b97aa0-c431-415e-a335-c242e30d333b req-d92a2a0d-3c95-4611-af49-cf0eaa65af32 service nova] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Received event network-vif-deleted-4da807e4-2d99-47d8-a155-20d29693f532 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1155.134119] env[69994]: INFO nova.compute.manager [req-d7b97aa0-c431-415e-a335-c242e30d333b req-d92a2a0d-3c95-4611-af49-cf0eaa65af32 service nova] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Neutron deleted interface 4da807e4-2d99-47d8-a155-20d29693f532; detaching it from the instance and deleting it from the info cache [ 1155.134299] env[69994]: DEBUG nova.network.neutron [req-d7b97aa0-c431-415e-a335-c242e30d333b req-d92a2a0d-3c95-4611-af49-cf0eaa65af32 service nova] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.194706] env[69994]: DEBUG nova.compute.manager [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1155.195060] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1155.196049] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c962341-f2d3-4f29-89f4-f77e1b90cbe5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.204837] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1155.205130] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-142dcae8-9194-41f5-9b51-7bdb1e7ee347 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.213514] env[69994]: DEBUG oslo_vmware.api [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1155.213514] env[69994]: value = "task-2926316" [ 1155.213514] env[69994]: _type = "Task" [ 1155.213514] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.224635] env[69994]: DEBUG nova.network.neutron [-] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.226041] env[69994]: DEBUG oslo_vmware.api [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926316, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.338326] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926313, 'name': ReconfigVM_Task, 'duration_secs': 0.578838} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.338641] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 395a4d39-29ae-4443-949f-4737e7e2341e/395a4d39-29ae-4443-949f-4737e7e2341e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1155.339288] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3edbb827-1641-4d3e-97e4-f2139186cc75 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.345772] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1155.345772] env[69994]: value = "task-2926317" [ 1155.345772] env[69994]: _type = "Task" [ 1155.345772] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.353486] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926317, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.422245] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Releasing lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.444119] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.329s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.449953] env[69994]: DEBUG oslo_vmware.api [None req-b8307802-1f2e-42c5-bfb5-e4f97f7a388a tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926314, 'name': SuspendVM_Task, 'duration_secs': 0.76117} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.450474] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b8307802-1f2e-42c5-bfb5-e4f97f7a388a tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Suspended the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1155.450700] env[69994]: DEBUG nova.compute.manager [None req-b8307802-1f2e-42c5-bfb5-e4f97f7a388a tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1155.451522] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb3d000-fe0f-4e9a-b120-7d091866a5cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.517565] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520fdb87-5c51-7e67-4f90-4d4f7559e80b, 'name': SearchDatastore_Task, 'duration_secs': 0.033323} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.517866] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.518152] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] a828caf9-2b61-4449-b1ee-25f0828380d1/a828caf9-2b61-4449-b1ee-25f0828380d1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1155.518426] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a27f2fa-a48f-4727-b0e6-b7f34c581a31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.525905] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1155.525905] env[69994]: value = "task-2926318" [ 1155.525905] env[69994]: _type = "Task" [ 1155.525905] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.537920] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926318, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.637593] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a120de2e-253c-422a-a177-e2425cee75a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.650772] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b18f857-1c61-4876-afcc-08b372319490 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.686633] env[69994]: DEBUG nova.compute.manager [req-d7b97aa0-c431-415e-a335-c242e30d333b req-d92a2a0d-3c95-4611-af49-cf0eaa65af32 service nova] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Detach interface failed, port_id=4da807e4-2d99-47d8-a155-20d29693f532, reason: Instance 06fa5ab5-baab-466e-8574-5391247c13a8 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1155.690029] env[69994]: DEBUG nova.compute.manager [req-cfd53761-77e1-4790-8aaa-2d3c25573dae req-9ef00a0f-add1-4a94-8792-398327f8b2a0 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Received event network-vif-unplugged-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1155.690131] env[69994]: DEBUG oslo_concurrency.lockutils [req-cfd53761-77e1-4790-8aaa-2d3c25573dae req-9ef00a0f-add1-4a94-8792-398327f8b2a0 service nova] Acquiring lock "85293c91-f363-4085-9eb8-2bf6514fa2f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.690383] env[69994]: DEBUG oslo_concurrency.lockutils [req-cfd53761-77e1-4790-8aaa-2d3c25573dae req-9ef00a0f-add1-4a94-8792-398327f8b2a0 service nova] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.690542] env[69994]: DEBUG oslo_concurrency.lockutils [req-cfd53761-77e1-4790-8aaa-2d3c25573dae req-9ef00a0f-add1-4a94-8792-398327f8b2a0 service nova] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.690703] env[69994]: DEBUG nova.compute.manager [req-cfd53761-77e1-4790-8aaa-2d3c25573dae req-9ef00a0f-add1-4a94-8792-398327f8b2a0 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] No waiting events found dispatching network-vif-unplugged-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1155.690879] env[69994]: WARNING nova.compute.manager [req-cfd53761-77e1-4790-8aaa-2d3c25573dae req-9ef00a0f-add1-4a94-8792-398327f8b2a0 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Received unexpected event network-vif-unplugged-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b for instance with vm_state shelved and task_state shelving_offloading. 
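The recurring trio of entries above and below — "Waiting for the task: (returnval){ value = task-NNN ... } to complete" (wait_for_task at oslo_vmware/api.py:397), "_poll_task ... progress is N%" (api.py:434) and "... completed successfully" (api.py:444) — is oslo.vmware's task-polling helper driving a vCenter Task managed object to its terminal state. A minimal sketch of how a caller produces that pattern is shown below; the vCenter host, credentials and managed-object reference used here are illustrative assumptions, not values taken from this log.

```python
# Minimal sketch of the oslo.vmware task-polling pattern visible in this log
# ("Waiting for the task ...", "_poll_task ... progress is N%", "completed
# successfully").  Host, credentials and the VM moref are assumptions for the
# example only.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org',            # assumed vCenter host
    'administrator@vsphere.local',    # assumed user
    'secret',                         # assumed password
    api_retry_count=10,
    task_poll_interval=0.5)           # interval at which _poll_task logs progress

# invoke_api() returns a Task moref (the "task-NNNNNNN" values in the log);
# wait_for_task() polls it until it reaches "success" or "error", emitting the
# Waiting / progress / completed lines seen here.
vm_ref = session.invoke_api(vim_util, 'get_moref', 'vm-12345', 'VirtualMachine')  # assumed moref
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
task_info = session.wait_for_task(task)   # raises on task error, returns TaskInfo on success
print(task_info.state)
```

Each SearchDatastore_Task, PowerOffVM_Task, CopyVirtualDisk_Task, ReconfigVM_Task and DeleteDatastoreFile_Task entry in this log is one such invoke_api()/wait_for_task() round trip.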
[ 1155.718596] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1155.719571] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514e192d-b218-4d5a-b702-f50b4dcc3647 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.730200] env[69994]: INFO nova.compute.manager [-] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Took 1.25 seconds to deallocate network for instance. [ 1155.730549] env[69994]: DEBUG oslo_vmware.api [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926316, 'name': PowerOffVM_Task, 'duration_secs': 0.487743} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.734068] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1155.734261] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1155.734545] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1155.737194] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-392d44cb-c211-4986-8e50-47168ed4f6b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.738707] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d152e778-1285-40db-877e-c47ccc87d864 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.831322] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1155.833159] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
1155.833159] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleting the datastore file [datastore1] 826489f7-081d-4a3e-8a05-62d902849a61 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1155.833159] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88b91c97-a7b7-4967-8280-aed9b55e241c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.842973] env[69994]: DEBUG oslo_vmware.api [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for the task: (returnval){ [ 1155.842973] env[69994]: value = "task-2926321" [ 1155.842973] env[69994]: _type = "Task" [ 1155.842973] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.853569] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1155.853841] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1155.854045] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Deleting the datastore file [datastore2] 85293c91-f363-4085-9eb8-2bf6514fa2f1 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1155.854829] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7127003b-b5e6-45cc-93a3-21cc87f63071 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.864917] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926317, 'name': Rename_Task, 'duration_secs': 0.208874} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.865272] env[69994]: DEBUG oslo_vmware.api [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926321, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.866062] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1155.866354] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9def4f1c-f407-4cb7-b9f8-f7a0e4b3fadd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.871547] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1155.871547] env[69994]: value = "task-2926322" [ 1155.871547] env[69994]: _type = "Task" [ 1155.871547] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.876602] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1155.876602] env[69994]: value = "task-2926323" [ 1155.876602] env[69994]: _type = "Task" [ 1155.876602] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.886456] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926322, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.895530] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926323, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.955399] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc6e20c-3777-4977-8ffe-9c0db296f4d2 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "3c374550-d65b-494a-89d7-60720f6b44dc" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 26.095s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.956623] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "3c374550-d65b-494a-89d7-60720f6b44dc" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.860s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.956815] env[69994]: INFO nova.compute.manager [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Unshelving [ 1156.037677] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926318, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.072506] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6d010cb-64dd-47f1-88bb-a16ef7337d6c tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.259s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.242412] env[69994]: DEBUG oslo_concurrency.lockutils [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.242720] env[69994]: DEBUG oslo_concurrency.lockutils [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.242947] env[69994]: DEBUG nova.objects.instance [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lazy-loading 'resources' on Instance uuid 06fa5ab5-baab-466e-8574-5391247c13a8 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1156.354281] env[69994]: DEBUG oslo_vmware.api [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Task: {'id': task-2926321, 'name': DeleteDatastoreFile_Task, 
'duration_secs': 0.431539} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.354576] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1156.354789] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1156.355035] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1156.355264] env[69994]: INFO nova.compute.manager [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1156.355569] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1156.355811] env[69994]: DEBUG nova.compute.manager [-] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1156.355923] env[69994]: DEBUG nova.network.neutron [-] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1156.385307] env[69994]: DEBUG oslo_vmware.api [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926322, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.476481} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.385980] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1156.386191] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1156.386372] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1156.392328] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926323, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.412275] env[69994]: INFO nova.scheduler.client.report [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Deleted allocations for instance 85293c91-f363-4085-9eb8-2bf6514fa2f1 [ 1156.537803] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926318, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545291} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.537989] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] a828caf9-2b61-4449-b1ee-25f0828380d1/a828caf9-2b61-4449-b1ee-25f0828380d1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1156.538261] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1156.538561] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7970f2e-f3d4-4333-97f8-22bfc40c5899 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.546258] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1156.546258] env[69994]: value = "task-2926324" [ 1156.546258] env[69994]: _type = "Task" [ 1156.546258] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.556436] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926324, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.686655] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1156.687920] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587638', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'name': 'volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '60f6d502-0fef-4764-8c1f-1b1d5ab3db41', 'attached_at': '', 'detached_at': '', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'serial': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1156.687920] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d35954e-f33b-4ff7-8d67-9ce2a444016f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.708942] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29dcc75-ad05-4ed8-84ff-7b53931b8263 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.737595] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4/volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1156.738644] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90330f7b-36c6-454d-a976-13c66262bdd8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.761867] env[69994]: DEBUG oslo_vmware.api [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1156.761867] env[69994]: value = "task-2926325" [ 1156.761867] env[69994]: _type = "Task" [ 1156.761867] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.771279] env[69994]: DEBUG oslo_vmware.api [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926325, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.817776] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "6b29cefb-8f86-4826-a1c9-873fd48c53a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.818121] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "6b29cefb-8f86-4826-a1c9-873fd48c53a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.818665] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "6b29cefb-8f86-4826-a1c9-873fd48c53a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.818853] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "6b29cefb-8f86-4826-a1c9-873fd48c53a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.819607] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "6b29cefb-8f86-4826-a1c9-873fd48c53a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.824106] env[69994]: INFO nova.compute.manager [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Terminating instance [ 1156.890664] env[69994]: DEBUG oslo_vmware.api [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926323, 'name': PowerOnVM_Task, 'duration_secs': 0.670246} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.890994] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1156.891221] env[69994]: INFO nova.compute.manager [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Took 7.99 seconds to spawn the instance on the hypervisor. [ 1156.891399] env[69994]: DEBUG nova.compute.manager [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1156.892279] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af54857-c234-461d-a49a-fd478a9eb9d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.919112] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.931536] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a485dac-a4c9-4ac8-a279-2ad2a6437efe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.947848] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d32a93-da6e-473d-8df1-841126a9025e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.985979] env[69994]: DEBUG nova.compute.utils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1156.990057] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465e66b9-316c-4033-9075-8a4d19d4ed98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.996956] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e71eb7-6889-4fe9-87aa-5d95e3ac391a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.012423] env[69994]: DEBUG nova.compute.provider_tree [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1157.058173] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926324, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068488} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.058527] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1157.059431] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc8619b-cd36-472d-8d65-7d87e0508064 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.094164] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] a828caf9-2b61-4449-b1ee-25f0828380d1/a828caf9-2b61-4449-b1ee-25f0828380d1.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1157.094164] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10b01a9d-0df3-4791-9184-b6e5a8567429 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.115585] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1157.115585] env[69994]: value = "task-2926326" [ 1157.115585] env[69994]: _type = "Task" [ 1157.115585] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.126979] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926326, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.150064] env[69994]: DEBUG oslo_concurrency.lockutils [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "dd196e59-868b-409f-bddb-bb99b0c1092f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.150064] env[69994]: DEBUG oslo_concurrency.lockutils [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.150064] env[69994]: DEBUG oslo_concurrency.lockutils [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "dd196e59-868b-409f-bddb-bb99b0c1092f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.150064] env[69994]: DEBUG oslo_concurrency.lockutils [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.150064] env[69994]: DEBUG oslo_concurrency.lockutils [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.151943] env[69994]: INFO nova.compute.manager [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Terminating instance [ 1157.208640] env[69994]: DEBUG nova.network.neutron [-] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.272226] env[69994]: DEBUG oslo_vmware.api [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926325, 'name': ReconfigVM_Task, 'duration_secs': 0.378042} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.272539] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Reconfigured VM instance instance-00000065 to attach disk [datastore2] volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4/volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1157.277927] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a493d29-e446-4215-8992-c9738625f63b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.297109] env[69994]: DEBUG oslo_vmware.api [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1157.297109] env[69994]: value = "task-2926327" [ 1157.297109] env[69994]: _type = "Task" [ 1157.297109] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.307447] env[69994]: DEBUG oslo_vmware.api [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926327, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.328579] env[69994]: DEBUG nova.compute.manager [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1157.328734] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1157.329643] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6cb0d7f-2c74-4f0d-8ffd-6cbdd2c69544 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.337937] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1157.338228] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-434ed82e-480f-4171-b949-578032cc7066 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.415428] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1157.415678] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1157.417833] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleting the datastore file [datastore1] 6b29cefb-8f86-4826-a1c9-873fd48c53a7 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1157.417833] env[69994]: INFO nova.compute.manager [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Took 18.00 seconds to build instance. [ 1157.417833] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6dc4256f-57db-49d5-9c5c-bb5395a5e9e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.428912] env[69994]: DEBUG oslo_vmware.api [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1157.428912] env[69994]: value = "task-2926329" [ 1157.428912] env[69994]: _type = "Task" [ 1157.428912] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.440254] env[69994]: DEBUG oslo_vmware.api [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926329, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.491968] env[69994]: INFO nova.virt.block_device [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Booting with volume a387ddfa-4996-4758-be71-d088f121096d at /dev/sdb [ 1157.515356] env[69994]: DEBUG nova.scheduler.client.report [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.527189] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25a0c6c3-4208-4564-9d5c-70fda5f4c884 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.539106] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d221660-2f64-425a-87f5-17c80129cbe3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.573259] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83a38175-5a30-4a06-b4c9-bd6e91c5164a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.583562] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902183b6-b2a8-4487-934d-32e312156f04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.626314] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b55c9fba-1ea9-4239-822a-a86c26254512 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.638244] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc41d9a-53e0-4255-95a2-8c1a83eeff54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.640577] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926326, 'name': ReconfigVM_Task, 'duration_secs': 0.309954} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.641029] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfigured VM instance instance-0000006e to attach disk [datastore2] a828caf9-2b61-4449-b1ee-25f0828380d1/a828caf9-2b61-4449-b1ee-25f0828380d1.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1157.641826] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3de36089-10e0-4b21-8594-70dea190df40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.650346] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1157.650346] env[69994]: value = "task-2926330" [ 1157.650346] env[69994]: _type = "Task" [ 1157.650346] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.657661] env[69994]: DEBUG nova.compute.manager [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1157.657888] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1157.658425] env[69994]: DEBUG nova.virt.block_device [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updating existing volume attachment record: eeb53b60-d90b-432a-969c-557288dd34c7 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1157.661347] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e8bc81-c1b2-4ff1-85bf-bee13dbd5d7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.668913] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926330, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.671072] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1157.671606] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-107a4823-8875-49f6-969d-4cb486b1a36a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.679883] env[69994]: DEBUG oslo_vmware.api [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1157.679883] env[69994]: value = "task-2926331" [ 1157.679883] env[69994]: _type = "Task" [ 1157.679883] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.690480] env[69994]: DEBUG oslo_vmware.api [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926331, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.713855] env[69994]: INFO nova.compute.manager [-] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Took 1.36 seconds to deallocate network for instance. [ 1157.730497] env[69994]: DEBUG nova.compute.manager [req-952062ba-8734-43a9-b5a3-2173770f6468 req-610eec55-1065-45bb-970b-0941d441206c service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Received event network-changed-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1157.730833] env[69994]: DEBUG nova.compute.manager [req-952062ba-8734-43a9-b5a3-2173770f6468 req-610eec55-1065-45bb-970b-0941d441206c service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Refreshing instance network info cache due to event network-changed-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1157.731216] env[69994]: DEBUG oslo_concurrency.lockutils [req-952062ba-8734-43a9-b5a3-2173770f6468 req-610eec55-1065-45bb-970b-0941d441206c service nova] Acquiring lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.731259] env[69994]: DEBUG oslo_concurrency.lockutils [req-952062ba-8734-43a9-b5a3-2173770f6468 req-610eec55-1065-45bb-970b-0941d441206c service nova] Acquired lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.732025] env[69994]: DEBUG nova.network.neutron [req-952062ba-8734-43a9-b5a3-2173770f6468 req-610eec55-1065-45bb-970b-0941d441206c service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Refreshing network info cache for port dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1157.809738] env[69994]: DEBUG oslo_vmware.api [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926327, 'name': ReconfigVM_Task, 'duration_secs': 0.165409} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.809738] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587638', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'name': 'volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '60f6d502-0fef-4764-8c1f-1b1d5ab3db41', 'attached_at': '', 'detached_at': '', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'serial': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1157.922498] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fee592a-1ba2-4d78-9a30-22bd8d605ef5 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "395a4d39-29ae-4443-949f-4737e7e2341e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.512s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.938535] env[69994]: DEBUG oslo_vmware.api [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926329, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.300499} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.938820] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1157.939063] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1157.939271] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1157.939416] env[69994]: INFO nova.compute.manager [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1157.939651] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1157.939849] env[69994]: DEBUG nova.compute.manager [-] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1157.939945] env[69994]: DEBUG nova.network.neutron [-] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1158.020185] env[69994]: DEBUG oslo_concurrency.lockutils [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.777s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.023346] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.103s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.023589] env[69994]: DEBUG nova.objects.instance [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lazy-loading 'resources' on Instance uuid 85293c91-f363-4085-9eb8-2bf6514fa2f1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.040503] env[69994]: INFO nova.scheduler.client.report [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Deleted allocations for instance 06fa5ab5-baab-466e-8574-5391247c13a8 [ 1158.163215] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926330, 'name': Rename_Task, 'duration_secs': 0.155489} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.163533] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1158.163794] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-587fc078-70c7-4d66-872f-2f59c12e3520 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.171613] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1158.171613] env[69994]: value = "task-2926335" [ 1158.171613] env[69994]: _type = "Task" [ 1158.171613] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.186846] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926335, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.192529] env[69994]: DEBUG oslo_vmware.api [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926331, 'name': PowerOffVM_Task, 'duration_secs': 0.215335} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.192804] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1158.192969] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1158.193714] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a85935e6-385f-4d15-917c-70481c65394f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.221979] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.273196] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1158.273196] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1158.273196] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Deleting the datastore file [datastore1] dd196e59-868b-409f-bddb-bb99b0c1092f {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1158.275978] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-4a51b895-5947-4b74-940e-f05d741ee50d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.284863] env[69994]: DEBUG oslo_vmware.api [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1158.284863] env[69994]: value = "task-2926337" [ 1158.284863] env[69994]: _type = "Task" [ 1158.284863] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.294683] env[69994]: DEBUG oslo_vmware.api [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926337, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.457654] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.457938] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.458700] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.458878] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.459045] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.459464] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.459648] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.459829] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1158.459956] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.498989] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "85293c91-f363-4085-9eb8-2bf6514fa2f1" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.530236] env[69994]: DEBUG nova.objects.instance [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lazy-loading 'numa_topology' on Instance uuid 85293c91-f363-4085-9eb8-2bf6514fa2f1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.547546] env[69994]: DEBUG oslo_concurrency.lockutils [None req-92667e76-31a3-4a37-8a80-b2dd9057bc18 tempest-ServersTestJSON-1703962916 tempest-ServersTestJSON-1703962916-project-member] Lock "06fa5ab5-baab-466e-8574-5391247c13a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.707s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.578586] env[69994]: DEBUG nova.network.neutron [req-952062ba-8734-43a9-b5a3-2173770f6468 req-610eec55-1065-45bb-970b-0941d441206c service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Updated VIF entry in instance network info cache for port dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1158.578979] env[69994]: DEBUG nova.network.neutron [req-952062ba-8734-43a9-b5a3-2173770f6468 req-610eec55-1065-45bb-970b-0941d441206c service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Updating instance_info_cache with network_info: [{"id": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "address": "fa:16:3e:96:f0:2c", "network": {"id": "ca055ef0-8a45-4457-a25c-226ccd592aa9", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1472062423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0af2d3f09d264d4c9bba8747f74383bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapdcfa2bab-5c", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.684486] env[69994]: DEBUG oslo_vmware.api [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926335, 'name': PowerOnVM_Task, 'duration_secs': 0.511993} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.684486] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1158.685206] env[69994]: INFO nova.compute.manager [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Took 7.40 seconds to spawn the instance on the hypervisor. [ 1158.685206] env[69994]: DEBUG nova.compute.manager [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1158.686192] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd1c1cb-d264-4157-87bf-cf78fe8c5241 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.803735] env[69994]: DEBUG oslo_vmware.api [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926337, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.258511} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.803735] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1158.803956] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1158.803956] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1158.804232] env[69994]: INFO nova.compute.manager [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1158.804373] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1158.804574] env[69994]: DEBUG nova.compute.manager [-] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1158.804668] env[69994]: DEBUG nova.network.neutron [-] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1158.864535] env[69994]: DEBUG nova.objects.instance [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lazy-loading 'flavor' on Instance uuid 60f6d502-0fef-4764-8c1f-1b1d5ab3db41 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.875774] env[69994]: DEBUG nova.network.neutron [-] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.963944] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.032907] env[69994]: DEBUG nova.objects.base [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Object Instance<85293c91-f363-4085-9eb8-2bf6514fa2f1> lazy-loaded attributes: resources,numa_topology {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1159.082631] env[69994]: DEBUG oslo_concurrency.lockutils [req-952062ba-8734-43a9-b5a3-2173770f6468 req-610eec55-1065-45bb-970b-0941d441206c service nova] Releasing lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.082942] env[69994]: DEBUG nova.compute.manager [req-952062ba-8734-43a9-b5a3-2173770f6468 req-610eec55-1065-45bb-970b-0941d441206c service nova] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Received event network-vif-deleted-f04394ab-b901-419c-9ec1-c1855524b7e5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1159.209997] env[69994]: INFO nova.compute.manager [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Took 16.73 seconds to build instance. 
[ 1159.253206] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3bc458b-45c5-4ee3-af17-db3062caba3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.262526] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f4e9e8-183e-43e4-9e0a-30906f8ae24b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.298316] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c42e6a-4958-498d-aedf-ab0049cb3ca7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.307229] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898c5b44-83f0-4af0-8696-cf6c861c01db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.323452] env[69994]: DEBUG nova.compute.provider_tree [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.369886] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a3c74ca3-2242-4e47-8fd7-d9c9f0552c3c tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.315s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.379347] env[69994]: INFO nova.compute.manager [-] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Took 1.44 seconds to deallocate network for instance. 
[ 1159.716101] env[69994]: DEBUG oslo_concurrency.lockutils [None req-270bf866-bdce-4f71-88ba-a6a37dca0e47 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.247s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.769758] env[69994]: DEBUG nova.compute.manager [req-6eb1d174-ea2b-4f92-95cb-c053d38da2aa req-8ba03255-7b1a-4943-aa8e-4982e3276a11 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Received event network-vif-deleted-54d38568-e06b-4b75-a558-72b7cd089413 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1159.769984] env[69994]: INFO nova.compute.manager [req-6eb1d174-ea2b-4f92-95cb-c053d38da2aa req-8ba03255-7b1a-4943-aa8e-4982e3276a11 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Neutron deleted interface 54d38568-e06b-4b75-a558-72b7cd089413; detaching it from the instance and deleting it from the info cache [ 1159.770185] env[69994]: DEBUG nova.network.neutron [req-6eb1d174-ea2b-4f92-95cb-c053d38da2aa req-8ba03255-7b1a-4943-aa8e-4982e3276a11 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.780253] env[69994]: DEBUG nova.compute.manager [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Received event network-vif-deleted-9e54b10f-7c32-425e-b571-b3602df12045 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1159.780452] env[69994]: DEBUG nova.compute.manager [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Received event network-changed-be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1159.780647] env[69994]: DEBUG nova.compute.manager [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Refreshing instance network info cache due to event network-changed-be3723ea-e18d-4908-bb9b-d8bbce5d3cee. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1159.780862] env[69994]: DEBUG oslo_concurrency.lockutils [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] Acquiring lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.780999] env[69994]: DEBUG oslo_concurrency.lockutils [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] Acquired lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.781202] env[69994]: DEBUG nova.network.neutron [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Refreshing network info cache for port be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1159.829020] env[69994]: DEBUG nova.scheduler.client.report [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1159.889215] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.240473] env[69994]: DEBUG nova.network.neutron [-] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.262597] env[69994]: INFO nova.compute.manager [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Rebuilding instance [ 1160.274028] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f8b8e201-4b65-46c4-ab96-a01b91a2ce80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.287613] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72623d0a-4452-48ca-94a6-468428401137 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.332685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.310s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.336134] env[69994]: DEBUG nova.compute.manager [req-6eb1d174-ea2b-4f92-95cb-c053d38da2aa req-8ba03255-7b1a-4943-aa8e-4982e3276a11 service nova] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Detach interface failed, port_id=54d38568-e06b-4b75-a558-72b7cd089413, reason: Instance dd196e59-868b-409f-bddb-bb99b0c1092f could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1160.339179] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.117s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.339517] env[69994]: DEBUG nova.objects.instance [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lazy-loading 'resources' on Instance uuid 826489f7-081d-4a3e-8a05-62d902849a61 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1160.345313] env[69994]: DEBUG nova.compute.manager [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1160.347321] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647873a4-957a-400c-aacb-bb228539398b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.541639] env[69994]: DEBUG nova.network.neutron [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updated VIF entry in instance network info cache for port be3723ea-e18d-4908-bb9b-d8bbce5d3cee. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1160.542079] env[69994]: DEBUG nova.network.neutron [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updating instance_info_cache with network_info: [{"id": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "address": "fa:16:3e:3a:fd:a2", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3723ea-e1", "ovs_interfaceid": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.742360] env[69994]: INFO nova.compute.manager [-] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Took 1.94 seconds to deallocate network for instance. 
[ 1160.849860] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82120f3e-4ccd-452c-a719-35e86893e125 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.783s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.851612] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.353s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.851814] env[69994]: INFO nova.compute.manager [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Unshelving [ 1161.047457] env[69994]: DEBUG oslo_concurrency.lockutils [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] Releasing lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.047457] env[69994]: DEBUG nova.compute.manager [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Received event network-changed-7608b3ed-dbf1-48c0-a088-071f08980220 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.047457] env[69994]: DEBUG nova.compute.manager [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Refreshing instance network info cache due to event network-changed-7608b3ed-dbf1-48c0-a088-071f08980220. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1161.047457] env[69994]: DEBUG oslo_concurrency.lockutils [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] Acquiring lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.047457] env[69994]: DEBUG oslo_concurrency.lockutils [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] Acquired lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.047457] env[69994]: DEBUG nova.network.neutron [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Refreshing network info cache for port 7608b3ed-dbf1-48c0-a088-071f08980220 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1161.048743] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b911107-0a13-4d3b-a600-ebad6fc1996d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.058893] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61bbf13-7e9f-43e1-ab1d-83e0f97ea868 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.093151] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8fd706f-915d-48fe-9f9f-e3f0cf6fc083 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.101716] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7103a68e-8915-466c-bf23-019b229b78c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.117884] env[69994]: DEBUG nova.compute.provider_tree [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1161.254779] env[69994]: DEBUG oslo_concurrency.lockutils [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.365273] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1161.365622] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a70b2fec-f398-407b-9dea-6abe4937ba21 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.375764] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1161.375764] env[69994]: value = "task-2926339" [ 1161.375764] env[69994]: _type = "Task" [ 1161.375764] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.386675] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926339, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.619798] env[69994]: DEBUG nova.scheduler.client.report [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1161.818014] env[69994]: DEBUG nova.compute.manager [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Received event network-changed-7608b3ed-dbf1-48c0-a088-071f08980220 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.818276] env[69994]: DEBUG nova.compute.manager [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Refreshing instance network info cache due to event network-changed-7608b3ed-dbf1-48c0-a088-071f08980220. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1161.818517] env[69994]: DEBUG oslo_concurrency.lockutils [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] Acquiring lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.852046] env[69994]: DEBUG nova.compute.manager [req-02e65820-e9cc-40a8-bcfe-0106cb5e21bb req-4b993aea-85d5-4727-bd49-7388a0272e9f service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Received event network-changed-b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.852293] env[69994]: DEBUG nova.compute.manager [req-02e65820-e9cc-40a8-bcfe-0106cb5e21bb req-4b993aea-85d5-4727-bd49-7388a0272e9f service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Refreshing instance network info cache due to event network-changed-b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1161.852454] env[69994]: DEBUG oslo_concurrency.lockutils [req-02e65820-e9cc-40a8-bcfe-0106cb5e21bb req-4b993aea-85d5-4727-bd49-7388a0272e9f service nova] Acquiring lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.852600] env[69994]: DEBUG oslo_concurrency.lockutils [req-02e65820-e9cc-40a8-bcfe-0106cb5e21bb req-4b993aea-85d5-4727-bd49-7388a0272e9f service nova] Acquired lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.852771] env[69994]: DEBUG nova.network.neutron [req-02e65820-e9cc-40a8-bcfe-0106cb5e21bb req-4b993aea-85d5-4727-bd49-7388a0272e9f service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Refreshing network info cache for port b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1161.885709] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.888282] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926339, 'name': PowerOffVM_Task, 'duration_secs': 0.252947} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.888530] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1161.950760] env[69994]: INFO nova.compute.manager [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Detaching volume e95e0bf4-fc79-440d-84e6-8467a47b5cc4 [ 1161.982897] env[69994]: INFO nova.virt.block_device [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Attempting to driver detach volume e95e0bf4-fc79-440d-84e6-8467a47b5cc4 from mountpoint /dev/sdb [ 1161.984041] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1161.984041] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587638', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'name': 'volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '60f6d502-0fef-4764-8c1f-1b1d5ab3db41', 'attached_at': '', 'detached_at': '', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'serial': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1161.984604] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24b52c8-83e5-4b18-b526-74a15b59ecd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.010025] env[69994]: DEBUG nova.network.neutron [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updated VIF entry in instance network info cache for port 7608b3ed-dbf1-48c0-a088-071f08980220. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1162.010477] env[69994]: DEBUG nova.network.neutron [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updating instance_info_cache with network_info: [{"id": "7608b3ed-dbf1-48c0-a088-071f08980220", "address": "fa:16:3e:78:4f:1d", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7608b3ed-db", "ovs_interfaceid": "7608b3ed-dbf1-48c0-a088-071f08980220", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.012332] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf345b0-db39-4e45-948a-2efb313f5293 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.019478] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e3146a34-c33c-4ce8-825b-5e15a4fb5a87 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.044050] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0300e573-8937-4709-8ef7-322fbc566c2a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.061593] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] The volume has not been displaced from its original location: [datastore2] volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4/volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4.vmdk. No consolidation needed. {{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1162.066814] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1162.067130] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02e84078-baba-4c63-bed4-8991abb079a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.086939] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1162.086939] env[69994]: value = "task-2926340" [ 1162.086939] env[69994]: _type = "Task" [ 1162.086939] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.096518] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926340, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.125342] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.786s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.127651] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.163s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.127651] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.127651] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1162.127863] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.239s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.128132] env[69994]: DEBUG nova.objects.instance [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lazy-loading 'resources' on Instance uuid 6b29cefb-8f86-4826-a1c9-873fd48c53a7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.130240] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83715275-c19b-41df-8568-9ae0e5e9b465 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.139266] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f45724-7bc2-4838-b370-12194826c3e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.155453] env[69994]: INFO nova.scheduler.client.report [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Deleted allocations for instance 826489f7-081d-4a3e-8a05-62d902849a61 [ 1162.156913] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4e3713-0236-467b-abb6-c477bf419ff3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.167590] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d02252da-07da-4559-b5ad-f3017ef4b504 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.202125] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179719MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1162.202306] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.513295] env[69994]: DEBUG oslo_concurrency.lockutils [req-e7bf62c9-eaaa-40bf-8118-d7c961a953d9 req-72d526a5-e0f3-4df3-9d8c-769c996ad0e4 service nova] Releasing lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.513807] env[69994]: DEBUG oslo_concurrency.lockutils [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] Acquired lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.514045] env[69994]: DEBUG nova.network.neutron [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Refreshing network info cache for port 7608b3ed-dbf1-48c0-a088-071f08980220 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1162.564586] env[69994]: DEBUG nova.network.neutron [req-02e65820-e9cc-40a8-bcfe-0106cb5e21bb req-4b993aea-85d5-4727-bd49-7388a0272e9f service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updated VIF entry in instance network info cache for port b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1162.565037] env[69994]: DEBUG nova.network.neutron [req-02e65820-e9cc-40a8-bcfe-0106cb5e21bb req-4b993aea-85d5-4727-bd49-7388a0272e9f service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance_info_cache with network_info: [{"id": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "address": "fa:16:3e:d5:49:79", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb02b823a-ee", "ovs_interfaceid": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.598255] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926340, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.667488] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7cbb3262-d17a-4383-842b-0fffb1c361e1 tempest-ServerDiskConfigTestJSON-696973435 tempest-ServerDiskConfigTestJSON-696973435-project-member] Lock "826489f7-081d-4a3e-8a05-62d902849a61" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.981s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.781871] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4267fc48-355e-47cd-b3fa-772acc2b097b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.790380] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4beb1393-a5d2-4a7b-b0aa-33d6ea218efb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.821551] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f324604-9aea-40d4-9620-0e82e0e83bc2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.831621] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1112b2f-866f-4255-966d-b528aad4f117 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.845940] env[69994]: DEBUG nova.compute.provider_tree [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1163.068255] env[69994]: DEBUG oslo_concurrency.lockutils [req-02e65820-e9cc-40a8-bcfe-0106cb5e21bb req-4b993aea-85d5-4727-bd49-7388a0272e9f service nova] Releasing lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.099397] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926340, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.267157] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.320040] env[69994]: DEBUG nova.network.neutron [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updated VIF entry in instance network info cache for port 7608b3ed-dbf1-48c0-a088-071f08980220. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1163.320444] env[69994]: DEBUG nova.network.neutron [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updating instance_info_cache with network_info: [{"id": "7608b3ed-dbf1-48c0-a088-071f08980220", "address": "fa:16:3e:78:4f:1d", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7608b3ed-db", "ovs_interfaceid": "7608b3ed-dbf1-48c0-a088-071f08980220", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.349535] env[69994]: DEBUG nova.scheduler.client.report [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1163.600976] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926340, 'name': ReconfigVM_Task, 'duration_secs': 1.255905} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.601468] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1163.605986] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f93b9e6-421c-4c89-a43a-861f4899f50c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.623922] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1163.623922] env[69994]: value = "task-2926341" [ 1163.623922] env[69994]: _type = "Task" [ 1163.623922] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.823921] env[69994]: DEBUG oslo_concurrency.lockutils [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] Releasing lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.824304] env[69994]: DEBUG nova.compute.manager [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Received event network-changed-be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1163.824516] env[69994]: DEBUG nova.compute.manager [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Refreshing instance network info cache due to event network-changed-be3723ea-e18d-4908-bb9b-d8bbce5d3cee. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1163.824759] env[69994]: DEBUG oslo_concurrency.lockutils [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] Acquiring lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.824922] env[69994]: DEBUG oslo_concurrency.lockutils [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] Acquired lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.825101] env[69994]: DEBUG nova.network.neutron [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Refreshing network info cache for port be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1163.855042] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.727s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.858613] env[69994]: DEBUG oslo_concurrency.lockutils [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.604s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.859097] env[69994]: DEBUG nova.objects.instance [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lazy-loading 'resources' on Instance uuid dd196e59-868b-409f-bddb-bb99b0c1092f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1163.882221] env[69994]: INFO nova.scheduler.client.report [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleted allocations for instance 6b29cefb-8f86-4826-a1c9-873fd48c53a7 [ 1164.134914] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926341, 'name': ReconfigVM_Task, 'duration_secs': 0.158605} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.135263] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587638', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'name': 'volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '60f6d502-0fef-4764-8c1f-1b1d5ab3db41', 'attached_at': '', 'detached_at': '', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'serial': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1164.397485] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6ac5075-3e80-4c19-bb5c-3f7a10699371 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "6b29cefb-8f86-4826-a1c9-873fd48c53a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.579s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.537621] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1245b64-6a06-4271-9195-4af5f8d3bc9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.549074] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac380001-c7ca-4f90-b20f-af2532ccfe73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.587193] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b031328a-2ba5-4a9a-8c03-e30aa5a68ebb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.597188] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159367fa-4a3c-4bf5-9c19-42117db5b699 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.619253] env[69994]: DEBUG nova.compute.provider_tree [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1164.791375] env[69994]: DEBUG nova.network.neutron [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updated VIF entry in instance network info cache for port be3723ea-e18d-4908-bb9b-d8bbce5d3cee. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1164.791375] env[69994]: DEBUG nova.network.neutron [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updating instance_info_cache with network_info: [{"id": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "address": "fa:16:3e:3a:fd:a2", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3723ea-e1", "ovs_interfaceid": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.122902] env[69994]: DEBUG nova.scheduler.client.report [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1165.187456] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1165.187789] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1522794e-6276-49ef-b5f8-9d22fa9c056b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.196889] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1165.196889] env[69994]: value = "task-2926342" [ 1165.196889] env[69994]: _type = "Task" [ 1165.196889] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.206747] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926342, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.294473] env[69994]: DEBUG oslo_concurrency.lockutils [req-056b33b2-fbc3-4f6b-84c0-6c47521eb096 req-42e1abae-29aa-46db-88f8-3a52b286e610 service nova] Releasing lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1165.561096] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.561287] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.630261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.769s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.632335] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.747s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.632742] env[69994]: DEBUG nova.objects.instance [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lazy-loading 'pci_requests' on Instance uuid 85293c91-f363-4085-9eb8-2bf6514fa2f1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1165.666476] env[69994]: INFO nova.scheduler.client.report [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Deleted allocations for instance dd196e59-868b-409f-bddb-bb99b0c1092f [ 1165.707905] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] VM already powered off {{(pid=69994) 
power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1165.708155] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1165.708356] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587638', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'name': 'volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '60f6d502-0fef-4764-8c1f-1b1d5ab3db41', 'attached_at': '', 'detached_at': '', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'serial': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1165.709203] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c701524e-126f-4be0-9825-bcc7e81641f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.731372] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9777d0ef-1e16-4f2a-8807-ef64bfd19a5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.739495] env[69994]: WARNING nova.virt.vmwareapi.driver [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1165.739590] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1165.740395] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022bd3db-1719-4b28-a01c-b74bf1db2434 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.748370] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1165.748538] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b778b33a-348a-4257-b4ba-251532de2ced {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.836330] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None 
req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1165.836574] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1165.836768] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleting the datastore file [datastore1] 60f6d502-0fef-4764-8c1f-1b1d5ab3db41 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1165.837142] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fde5c3f1-fb07-47b4-9a62-ea9d90430753 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.844961] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1165.844961] env[69994]: value = "task-2926344" [ 1165.844961] env[69994]: _type = "Task" [ 1165.844961] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.853262] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926344, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.064290] env[69994]: DEBUG nova.compute.manager [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1166.137846] env[69994]: DEBUG nova.objects.instance [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lazy-loading 'numa_topology' on Instance uuid 85293c91-f363-4085-9eb8-2bf6514fa2f1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.174310] env[69994]: DEBUG oslo_concurrency.lockutils [None req-503325fb-51a1-444d-a752-bd853ceaf9b4 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "dd196e59-868b-409f-bddb-bb99b0c1092f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.026s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.355601] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926344, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.508177} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.355865] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1166.356070] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1166.356257] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1166.588391] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.639993] env[69994]: INFO nova.compute.claims [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1166.861675] env[69994]: INFO nova.virt.block_device [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Booting with volume e95e0bf4-fc79-440d-84e6-8467a47b5cc4 at /dev/sdb [ 1166.901050] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-39328383-75d6-4aef-8ba4-b973e84e5f05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.912945] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a70cdd6-e4af-44a8-a67b-5868a4fe81e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.948767] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-baaeab90-ab01-4be1-9203-67e5b9e76f3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.959079] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac16c0e-84e6-4341-b357-ffc3f8c7fe0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.993807] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf85fc7-a64c-4ff8-88d1-410ffbf69fb5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.002505] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999282f9-0511-4d30-9c7d-b8fe244e9ee9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.017596] env[69994]: DEBUG nova.virt.block_device [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Updating existing volume attachment record: 97cb10c1-588c-4171-988c-94b01c9f57c4 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1167.798340] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930e1034-dae1-4a02-ab95-38a4f6247dc6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.807032] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19da7968-cddb-489f-9c15-e1f1314d9ce4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.837754] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0477e31d-ffff-4b17-bce6-681850f93290 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.846183] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2e6db7-79ec-4dbb-8db4-cdde1da8b84c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.859937] env[69994]: DEBUG nova.compute.provider_tree [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1168.363306] env[69994]: DEBUG nova.scheduler.client.report [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 
tempest-ServersNegativeTestJSON-216427101-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1168.721591] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "7963eb9f-66a1-417b-928b-3b5cef7847be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.721832] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.868734] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.237s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.870914] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 6.669s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.899824] env[69994]: INFO nova.network.neutron [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Updating port dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1169.133226] env[69994]: DEBUG nova.virt.hardware [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1169.133498] env[69994]: DEBUG nova.virt.hardware [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1169.133662] env[69994]: DEBUG nova.virt.hardware [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1169.133846] env[69994]: DEBUG nova.virt.hardware [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1169.133993] env[69994]: DEBUG nova.virt.hardware [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1169.134158] env[69994]: DEBUG nova.virt.hardware [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1169.134361] env[69994]: DEBUG nova.virt.hardware [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1169.134518] env[69994]: DEBUG nova.virt.hardware [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1169.134680] env[69994]: DEBUG nova.virt.hardware [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1169.134842] env[69994]: DEBUG nova.virt.hardware [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 
1169.135024] env[69994]: DEBUG nova.virt.hardware [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1169.135897] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac64354-6778-4474-8839-e12c5cc113ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.145948] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e63e67-d596-4d86-ad7c-bdb58c3894de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.161413] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:2f:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29cc87df-3c6e-45eb-a80d-5127f53062e1', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1169.169156] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1169.169805] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1169.170113] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8993c876-f3e6-4584-9931-b7023c7d29aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.191145] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1169.191145] env[69994]: value = "task-2926345" [ 1169.191145] env[69994]: _type = "Task" [ 1169.191145] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.199058] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926345, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.224028] env[69994]: DEBUG nova.compute.manager [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1169.701058] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926345, 'name': CreateVM_Task, 'duration_secs': 0.344307} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.701244] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1169.701971] env[69994]: DEBUG oslo_concurrency.lockutils [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.702153] env[69994]: DEBUG oslo_concurrency.lockutils [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.702471] env[69994]: DEBUG oslo_concurrency.lockutils [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1169.702728] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c96426af-8600-4681-990c-482849999ab6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.707070] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1169.707070] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a03cd-cada-760a-4e02-0c770cc8a6a2" [ 1169.707070] env[69994]: _type = "Task" [ 1169.707070] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.714580] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a03cd-cada-760a-4e02-0c770cc8a6a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.842963] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.896614] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 8001cb13-6a52-451b-b4b6-57b893975079 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.896880] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 68eba44a-0989-47dc-a88b-102d9aa34c5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.896880] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 60f6d502-0fef-4764-8c1f-1b1d5ab3db41 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.897013] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 29ea539a-d8f4-487b-b5e7-1f15534272f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.897138] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 87c5b8e4-166c-44b9-a179-1afaef751434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.897255] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 395a4d39-29ae-4443-949f-4737e7e2341e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1169.897369] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance a828caf9-2b61-4449-b1ee-25f0828380d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1170.218400] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a03cd-cada-760a-4e02-0c770cc8a6a2, 'name': SearchDatastore_Task, 'duration_secs': 0.011094} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.218739] env[69994]: DEBUG oslo_concurrency.lockutils [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.218971] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1170.219228] env[69994]: DEBUG oslo_concurrency.lockutils [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.219379] env[69994]: DEBUG oslo_concurrency.lockutils [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1170.219570] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1170.219889] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f836c44b-317e-49f8-9511-af2ca5cac1f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.231271] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1170.231466] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1170.232263] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8fe2006-fdeb-42d9-8bb3-9041c8b8d125 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.239082] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1170.239082] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520bcf24-9ce1-b21a-545e-0c47170eab63" [ 1170.239082] env[69994]: _type = "Task" [ 1170.239082] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.247934] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520bcf24-9ce1-b21a-545e-0c47170eab63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.370888] env[69994]: DEBUG nova.compute.manager [req-36943838-5f04-4622-9e90-1239ee6c9505 req-77aab4c3-f63d-4b48-ac43-3ae6c48a2638 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Received event network-vif-plugged-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1170.371126] env[69994]: DEBUG oslo_concurrency.lockutils [req-36943838-5f04-4622-9e90-1239ee6c9505 req-77aab4c3-f63d-4b48-ac43-3ae6c48a2638 service nova] Acquiring lock "85293c91-f363-4085-9eb8-2bf6514fa2f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.371336] env[69994]: DEBUG oslo_concurrency.lockutils [req-36943838-5f04-4622-9e90-1239ee6c9505 req-77aab4c3-f63d-4b48-ac43-3ae6c48a2638 service nova] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.371502] env[69994]: DEBUG oslo_concurrency.lockutils [req-36943838-5f04-4622-9e90-1239ee6c9505 req-77aab4c3-f63d-4b48-ac43-3ae6c48a2638 service nova] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.371668] env[69994]: DEBUG nova.compute.manager [req-36943838-5f04-4622-9e90-1239ee6c9505 req-77aab4c3-f63d-4b48-ac43-3ae6c48a2638 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] No waiting events found dispatching network-vif-plugged-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1170.371882] env[69994]: WARNING nova.compute.manager [req-36943838-5f04-4622-9e90-1239ee6c9505 req-77aab4c3-f63d-4b48-ac43-3ae6c48a2638 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Received unexpected event 
network-vif-plugged-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b for instance with vm_state shelved_offloaded and task_state spawning. [ 1170.400111] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 3c374550-d65b-494a-89d7-60720f6b44dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1170.400270] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 85293c91-f363-4085-9eb8-2bf6514fa2f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1170.452793] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.453052] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquired lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1170.453257] env[69994]: DEBUG nova.network.neutron [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1170.750013] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520bcf24-9ce1-b21a-545e-0c47170eab63, 'name': SearchDatastore_Task, 'duration_secs': 0.010149} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.750837] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa8508ca-5785-445c-8a72-b5c698f0ade5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.756567] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1170.756567] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e3623b-b7e9-2eaf-62fa-72d3318a929f" [ 1170.756567] env[69994]: _type = "Task" [ 1170.756567] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.764235] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e3623b-b7e9-2eaf-62fa-72d3318a929f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.902960] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance fe716314-1b5d-4b05-b34d-dfd444ed0c8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1171.139357] env[69994]: DEBUG nova.network.neutron [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Updating instance_info_cache with network_info: [{"id": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "address": "fa:16:3e:96:f0:2c", "network": {"id": "ca055ef0-8a45-4457-a25c-226ccd592aa9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472062423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0af2d3f09d264d4c9bba8747f74383bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcfa2bab-5c", "ovs_interfaceid": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.267623] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e3623b-b7e9-2eaf-62fa-72d3318a929f, 'name': SearchDatastore_Task, 'duration_secs': 0.009981} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.267898] env[69994]: DEBUG oslo_concurrency.lockutils [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.268182] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 60f6d502-0fef-4764-8c1f-1b1d5ab3db41/60f6d502-0fef-4764-8c1f-1b1d5ab3db41.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1171.268451] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00221c3d-3bf7-4249-8f35-bc295b0b2291 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.275572] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1171.275572] env[69994]: value = "task-2926346" [ 1171.275572] env[69994]: _type = "Task" [ 1171.275572] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.283301] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926346, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.378907] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "interface-87c5b8e4-166c-44b9-a179-1afaef751434-1c0f982d-cb97-4c63-b8e5-af47421200c1" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.379269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-87c5b8e4-166c-44b9-a179-1afaef751434-1c0f982d-cb97-4c63-b8e5-af47421200c1" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.379678] env[69994]: DEBUG nova.objects.instance [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'flavor' on Instance uuid 87c5b8e4-166c-44b9-a179-1afaef751434 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.405483] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 7963eb9f-66a1-417b-928b-3b5cef7847be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1171.405733] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1171.405880] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1171.563970] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea5d0a4-f82a-4111-bd7e-5016256f447a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.575804] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d04785-7d9e-4d48-81ec-739337498fb2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.616809] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db140664-d7e1-42bb-a46e-9211073d86c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.626928] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be0c89a-3d4e-40d9-a9de-5e04fac23a15 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.643584] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Releasing lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.646171] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1171.683739] env[69994]: DEBUG nova.virt.hardware [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='87df7cbad426d440432c8618e9eabf2a',container_format='bare',created_at=2025-03-11T12:39:16Z,direct_url=,disk_format='vmdk',id=757c5b7a-e993-4b3d-811b-7c0824b6c981,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1547684444-shelved',owner='0af2d3f09d264d4c9bba8747f74383bc',properties=ImageMetaProps,protected=,size=31664128,status='active',tags=,updated_at=2025-03-11T12:39:32Z,virtual_size=,visibility=), allow threads: False 
{{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1171.684008] env[69994]: DEBUG nova.virt.hardware [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1171.684189] env[69994]: DEBUG nova.virt.hardware [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1171.684385] env[69994]: DEBUG nova.virt.hardware [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1171.684535] env[69994]: DEBUG nova.virt.hardware [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1171.684687] env[69994]: DEBUG nova.virt.hardware [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1171.684912] env[69994]: DEBUG nova.virt.hardware [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1171.685093] env[69994]: DEBUG nova.virt.hardware [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1171.685254] env[69994]: DEBUG nova.virt.hardware [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1171.685427] env[69994]: DEBUG nova.virt.hardware [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1171.685606] env[69994]: DEBUG nova.virt.hardware [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1171.686633] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-eaf1ab78-f5d1-4ebd-9ae6-90c3fd8131b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.697349] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e5223c-1553-4179-87e1-98d33003b87f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.714271] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:f0:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1171.722440] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1171.723042] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1171.723749] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6732b6e4-f655-450a-ba6a-f1a68e0695fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.751354] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1171.751354] env[69994]: value = "task-2926347" [ 1171.751354] env[69994]: _type = "Task" [ 1171.751354] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.761708] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926347, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.786803] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926346, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.973215] env[69994]: DEBUG nova.objects.instance [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'pci_requests' on Instance uuid 87c5b8e4-166c-44b9-a179-1afaef751434 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.083278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78ecb999-9cd9-4499-8c42-4205f93cf93f tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.083646] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78ecb999-9cd9-4499-8c42-4205f93cf93f tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.084500] env[69994]: DEBUG nova.compute.manager [None req-78ecb999-9cd9-4499-8c42-4205f93cf93f tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1172.085445] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76007a3e-248e-4087-afbe-0aeb5e809389 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.092298] env[69994]: DEBUG nova.compute.manager [None req-78ecb999-9cd9-4499-8c42-4205f93cf93f tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1172.092848] env[69994]: DEBUG nova.objects.instance [None req-78ecb999-9cd9-4499-8c42-4205f93cf93f tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lazy-loading 'flavor' on Instance uuid 29ea539a-d8f4-487b-b5e7-1f15534272f9 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.149882] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1172.261966] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926347, 'name': CreateVM_Task, 
'duration_secs': 0.371957} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.262166] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1172.262845] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/757c5b7a-e993-4b3d-811b-7c0824b6c981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.263023] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/757c5b7a-e993-4b3d-811b-7c0824b6c981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.263404] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/757c5b7a-e993-4b3d-811b-7c0824b6c981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1172.263670] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21c61a71-225d-4982-a6b2-fe5fbff98ff9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.268538] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1172.268538] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52de5ed1-b21f-890b-c941-138b58a925ca" [ 1172.268538] env[69994]: _type = "Task" [ 1172.268538] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.276634] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52de5ed1-b21f-890b-c941-138b58a925ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.284688] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926346, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608116} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.284950] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 60f6d502-0fef-4764-8c1f-1b1d5ab3db41/60f6d502-0fef-4764-8c1f-1b1d5ab3db41.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1172.285181] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1172.285424] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-efa7800a-3ba2-4d18-98fa-2989527dd467 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.291910] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1172.291910] env[69994]: value = "task-2926348" [ 1172.291910] env[69994]: _type = "Task" [ 1172.291910] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.301689] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926348, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.415273] env[69994]: DEBUG nova.compute.manager [req-6a0d371b-7756-46b8-b3e0-95462b738b42 req-303e362a-256e-4cdc-a749-278666b73d28 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Received event network-changed-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1172.415480] env[69994]: DEBUG nova.compute.manager [req-6a0d371b-7756-46b8-b3e0-95462b738b42 req-303e362a-256e-4cdc-a749-278666b73d28 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Refreshing instance network info cache due to event network-changed-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1172.415689] env[69994]: DEBUG oslo_concurrency.lockutils [req-6a0d371b-7756-46b8-b3e0-95462b738b42 req-303e362a-256e-4cdc-a749-278666b73d28 service nova] Acquiring lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.415833] env[69994]: DEBUG oslo_concurrency.lockutils [req-6a0d371b-7756-46b8-b3e0-95462b738b42 req-303e362a-256e-4cdc-a749-278666b73d28 service nova] Acquired lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.415988] env[69994]: DEBUG nova.network.neutron [req-6a0d371b-7756-46b8-b3e0-95462b738b42 req-303e362a-256e-4cdc-a749-278666b73d28 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Refreshing network info cache for port dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1172.475533] env[69994]: DEBUG nova.objects.base [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Object Instance<87c5b8e4-166c-44b9-a179-1afaef751434> lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1172.475765] env[69994]: DEBUG nova.network.neutron [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1172.535517] env[69994]: DEBUG nova.policy [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9c7ff94bd744305a13df72dbf967c11', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66d57a69e0924b9abc2cc4e67fc8173c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1172.654556] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1172.654710] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.784s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.654965] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: 
waited 9.388s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.655196] env[69994]: DEBUG nova.objects.instance [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lazy-loading 'pci_requests' on Instance uuid 3c374550-d65b-494a-89d7-60720f6b44dc {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.656495] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.656645] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Cleaning up deleted instances {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1172.782323] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/757c5b7a-e993-4b3d-811b-7c0824b6c981" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.782612] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Processing image 757c5b7a-e993-4b3d-811b-7c0824b6c981 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1172.782854] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/757c5b7a-e993-4b3d-811b-7c0824b6c981/757c5b7a-e993-4b3d-811b-7c0824b6c981.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.783050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/757c5b7a-e993-4b3d-811b-7c0824b6c981/757c5b7a-e993-4b3d-811b-7c0824b6c981.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.783208] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1172.783704] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-113dc92a-4750-4d1f-b086-b7c5b293f918 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.793524] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Created directory with path 
[datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1172.793718] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1172.797720] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd39018f-1eec-40d2-8efa-a60ac5e84ba2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.805884] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926348, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065911} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.807140] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1172.807532] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1172.807532] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c8429e-db81-9c53-1599-5790fb8d07d9" [ 1172.807532] env[69994]: _type = "Task" [ 1172.807532] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.808231] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7effa196-7194-45ea-b4a6-6f58d5b2caec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.828298] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1172.828649] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Fetch image to [datastore1] OSTACK_IMG_fbc1667d-0997-4b4d-ba37-19a108c0cb17/OSTACK_IMG_fbc1667d-0997-4b4d-ba37-19a108c0cb17.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1172.828823] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Downloading stream optimized image 757c5b7a-e993-4b3d-811b-7c0824b6c981 to [datastore1] OSTACK_IMG_fbc1667d-0997-4b4d-ba37-19a108c0cb17/OSTACK_IMG_fbc1667d-0997-4b4d-ba37-19a108c0cb17.vmdk on the data store datastore1 as vApp {{(pid=69994) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1172.828998] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Downloading image file data 757c5b7a-e993-4b3d-811b-7c0824b6c981 to the ESX as VM named 'OSTACK_IMG_fbc1667d-0997-4b4d-ba37-19a108c0cb17' {{(pid=69994) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1172.839290] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 60f6d502-0fef-4764-8c1f-1b1d5ab3db41/60f6d502-0fef-4764-8c1f-1b1d5ab3db41.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1172.840088] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55e1d57b-cb4c-466c-8800-fe6066ce661d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.880219] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1172.880219] env[69994]: value = "task-2926349" [ 1172.880219] env[69994]: _type = "Task" [ 1172.880219] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.889593] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926349, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.915584] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1172.915584] env[69994]: value = "resgroup-9" [ 1172.915584] env[69994]: _type = "ResourcePool" [ 1172.915584] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1172.915950] env[69994]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a1e428f3-511b-40e4-93ee-cce43afd010d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.941374] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lease: (returnval){ [ 1172.941374] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b3759b-8866-9446-800e-4ae6cfefb233" [ 1172.941374] env[69994]: _type = "HttpNfcLease" [ 1172.941374] env[69994]: } obtained for vApp import into resource pool (val){ [ 1172.941374] env[69994]: value = "resgroup-9" [ 1172.941374] env[69994]: _type = "ResourcePool" [ 1172.941374] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1172.942144] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the lease: (returnval){ [ 1172.942144] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b3759b-8866-9446-800e-4ae6cfefb233" [ 1172.942144] env[69994]: _type = "HttpNfcLease" [ 1172.942144] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1172.949643] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1172.949643] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b3759b-8866-9446-800e-4ae6cfefb233" [ 1172.949643] env[69994]: _type = "HttpNfcLease" [ 1172.949643] env[69994]: } is initializing. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1173.090775] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.091253] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.091342] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "68eba44a-0989-47dc-a88b-102d9aa34c5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.091482] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.091661] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.093800] env[69994]: INFO nova.compute.manager [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Terminating instance [ 1173.099802] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ecb999-9cd9-4499-8c42-4205f93cf93f tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1173.100082] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd8ac80c-105d-4892-9aa9-88d98027bdfd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.111111] env[69994]: DEBUG oslo_vmware.api [None req-78ecb999-9cd9-4499-8c42-4205f93cf93f tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1173.111111] env[69994]: value = 
"task-2926351" [ 1173.111111] env[69994]: _type = "Task" [ 1173.111111] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.121080] env[69994]: DEBUG oslo_vmware.api [None req-78ecb999-9cd9-4499-8c42-4205f93cf93f tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926351, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.156151] env[69994]: DEBUG nova.network.neutron [req-6a0d371b-7756-46b8-b3e0-95462b738b42 req-303e362a-256e-4cdc-a749-278666b73d28 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Updated VIF entry in instance network info cache for port dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1173.156535] env[69994]: DEBUG nova.network.neutron [req-6a0d371b-7756-46b8-b3e0-95462b738b42 req-303e362a-256e-4cdc-a749-278666b73d28 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Updating instance_info_cache with network_info: [{"id": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "address": "fa:16:3e:96:f0:2c", "network": {"id": "ca055ef0-8a45-4457-a25c-226ccd592aa9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472062423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0af2d3f09d264d4c9bba8747f74383bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcfa2bab-5c", "ovs_interfaceid": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.160517] env[69994]: DEBUG nova.objects.instance [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lazy-loading 'numa_topology' on Instance uuid 3c374550-d65b-494a-89d7-60720f6b44dc {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1173.176324] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] There are 60 instances to clean {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1173.176508] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 6b29cefb-8f86-4826-a1c9-873fd48c53a7] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1173.391822] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: 
{'id': task-2926349, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.449522] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1173.449522] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b3759b-8866-9446-800e-4ae6cfefb233" [ 1173.449522] env[69994]: _type = "HttpNfcLease" [ 1173.449522] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1173.449864] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1173.449864] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b3759b-8866-9446-800e-4ae6cfefb233" [ 1173.449864] env[69994]: _type = "HttpNfcLease" [ 1173.449864] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1173.450586] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e9b7df-2af2-41c3-83e6-d9edd68dbf9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.461146] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522b2919-ddfb-f747-1b77-9f26f98ebc16/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1173.461343] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Creating HTTP connection to write to file with size = 31664128 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522b2919-ddfb-f747-1b77-9f26f98ebc16/disk-0.vmdk. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1173.524484] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e2c57daa-9bd7-4b66-ad2d-d65c7959d384 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.597468] env[69994]: DEBUG nova.compute.manager [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1173.597770] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1173.598813] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975dddae-a9f8-4cd0-bf88-85a6ffb85225 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.607259] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1173.607259] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b27ac158-a521-4f2b-8b38-e7460c052a61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.616530] env[69994]: DEBUG oslo_vmware.api [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1173.616530] env[69994]: value = "task-2926352" [ 1173.616530] env[69994]: _type = "Task" [ 1173.616530] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.623294] env[69994]: DEBUG oslo_vmware.api [None req-78ecb999-9cd9-4499-8c42-4205f93cf93f tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926351, 'name': PowerOffVM_Task, 'duration_secs': 0.336003} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.623978] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ecb999-9cd9-4499-8c42-4205f93cf93f tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1173.624226] env[69994]: DEBUG nova.compute.manager [None req-78ecb999-9cd9-4499-8c42-4205f93cf93f tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1173.625058] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510f73d1-c6e7-4667-a093-67148b0fe8b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.630946] env[69994]: DEBUG oslo_vmware.api [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926352, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.659401] env[69994]: DEBUG oslo_concurrency.lockutils [req-6a0d371b-7756-46b8-b3e0-95462b738b42 req-303e362a-256e-4cdc-a749-278666b73d28 service nova] Releasing lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1173.679885] env[69994]: INFO nova.compute.claims [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1173.683614] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 826489f7-081d-4a3e-8a05-62d902849a61] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1173.896217] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926349, 'name': ReconfigVM_Task, 'duration_secs': 0.623395} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.898339] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 60f6d502-0fef-4764-8c1f-1b1d5ab3db41/60f6d502-0fef-4764-8c1f-1b1d5ab3db41.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1173.899812] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'encryption_format': None, 'boot_index': 0, 'device_type': 'disk', 'encrypted': False, 'disk_bus': None, 'guest_format': None, 'encryption_options': None, 'encryption_secret_uuid': None, 'size': 0, 'image_id': 'f75f967d-5bd8-4c15-9a52-96f7e9dd9d48'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587638', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'name': 'volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '60f6d502-0fef-4764-8c1f-1b1d5ab3db41', 'attached_at': '', 'detached_at': '', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'serial': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4'}, 'attachment_id': '97cb10c1-588c-4171-988c-94b01c9f57c4', 'device_type': None, 'mount_device': '/dev/sdb', 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69994) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1173.900043] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Volume attach. Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1173.900261] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587638', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'name': 'volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '60f6d502-0fef-4764-8c1f-1b1d5ab3db41', 'attached_at': '', 'detached_at': '', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'serial': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1173.901179] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed10ca7e-d679-4da3-abb9-d2a07060402a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.923793] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deadecff-4053-40dd-90b7-fd5a69e1d40a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.957060] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4/volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1173.964464] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4adc05e6-be98-442d-a35f-fcacf6284130 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.993735] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1173.993735] env[69994]: value = "task-2926353" [ 1173.993735] env[69994]: _type = "Task" [ 1173.993735] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.004831] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926353, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.076478] env[69994]: DEBUG nova.network.neutron [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Successfully updated port: 1c0f982d-cb97-4c63-b8e5-af47421200c1 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1174.131043] env[69994]: DEBUG oslo_vmware.api [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926352, 'name': PowerOffVM_Task, 'duration_secs': 0.215037} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.131390] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1174.131496] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1174.131772] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3917f6f4-88f6-48e6-8df2-9d6855a3c51a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.143081] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78ecb999-9cd9-4499-8c42-4205f93cf93f tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.059s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.191136] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: e53a4875-77e6-4a13-9a4e-004fe8014a85] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1174.214129] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1174.214436] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1174.214615] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] 
Deleting the datastore file [datastore1] 68eba44a-0989-47dc-a88b-102d9aa34c5d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1174.218219] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-993b13bd-fcdf-456b-aab7-d5cd830ec55e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.228233] env[69994]: DEBUG oslo_vmware.api [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1174.228233] env[69994]: value = "task-2926355" [ 1174.228233] env[69994]: _type = "Task" [ 1174.228233] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.242261] env[69994]: DEBUG oslo_vmware.api [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.505766] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926353, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.526828] env[69994]: DEBUG nova.compute.manager [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Received event network-vif-plugged-1c0f982d-cb97-4c63-b8e5-af47421200c1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1174.527346] env[69994]: DEBUG oslo_concurrency.lockutils [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] Acquiring lock "87c5b8e4-166c-44b9-a179-1afaef751434-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.527603] env[69994]: DEBUG oslo_concurrency.lockutils [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] Lock "87c5b8e4-166c-44b9-a179-1afaef751434-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.527792] env[69994]: DEBUG oslo_concurrency.lockutils [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] Lock "87c5b8e4-166c-44b9-a179-1afaef751434-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.527995] env[69994]: DEBUG nova.compute.manager [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] No waiting events found dispatching network-vif-plugged-1c0f982d-cb97-4c63-b8e5-af47421200c1 {{(pid=69994) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1174.528225] env[69994]: WARNING nova.compute.manager [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Received unexpected event network-vif-plugged-1c0f982d-cb97-4c63-b8e5-af47421200c1 for instance with vm_state active and task_state None. [ 1174.528549] env[69994]: DEBUG nova.compute.manager [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Received event network-changed-1c0f982d-cb97-4c63-b8e5-af47421200c1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1174.528755] env[69994]: DEBUG nova.compute.manager [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Refreshing instance network info cache due to event network-changed-1c0f982d-cb97-4c63-b8e5-af47421200c1. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1174.528952] env[69994]: DEBUG oslo_concurrency.lockutils [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] Acquiring lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.529105] env[69994]: DEBUG oslo_concurrency.lockutils [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] Acquired lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1174.529269] env[69994]: DEBUG nova.network.neutron [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Refreshing network info cache for port 1c0f982d-cb97-4c63-b8e5-af47421200c1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1174.579393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.586044] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Completed reading data from the image iterator. {{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1174.586044] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522b2919-ddfb-f747-1b77-9f26f98ebc16/disk-0.vmdk. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1174.586044] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3883848d-2760-4518-b7b2-79f5e6fa73fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.593112] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522b2919-ddfb-f747-1b77-9f26f98ebc16/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1174.593288] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522b2919-ddfb-f747-1b77-9f26f98ebc16/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1174.593534] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-1c9f2e24-c180-403b-b858-77e2418ca06f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.697547] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: f98078e1-ee72-4bdb-aebf-405ffbb7900d] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1174.739962] env[69994]: DEBUG oslo_vmware.api [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170182} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.740261] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1174.740446] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1174.740621] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1174.740837] env[69994]: INFO nova.compute.manager [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1174.741101] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1174.741303] env[69994]: DEBUG nova.compute.manager [-] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1174.741394] env[69994]: DEBUG nova.network.neutron [-] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1174.812335] env[69994]: DEBUG nova.objects.instance [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lazy-loading 'flavor' on Instance uuid 29ea539a-d8f4-487b-b5e7-1f15534272f9 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.855905] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b16a156-ce6a-439a-a77f-48f97ea4d5da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.862572] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522b2919-ddfb-f747-1b77-9f26f98ebc16/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1174.862792] env[69994]: INFO nova.virt.vmwareapi.images [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Downloaded image file data 757c5b7a-e993-4b3d-811b-7c0824b6c981 [ 1174.865538] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84537505-02f8-44c3-b544-55f157609fd2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.868805] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80288985-7fce-4522-90b3-d1d04b0da623 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.911874] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a176854e-0727-4496-bef2-3f87ef6acb31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.914891] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7075000d-8c02-475a-a8bd-a2c47981c518 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.923240] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70a3268-85b0-45e2-a141-5bcebbeff6f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.937610] env[69994]: DEBUG nova.compute.provider_tree [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1174.943138] env[69994]: INFO nova.virt.vmwareapi.images [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] The imported VM was unregistered [ 1174.949471] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1174.949471] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Creating directory with path [datastore1] devstack-image-cache_base/757c5b7a-e993-4b3d-811b-7c0824b6c981 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1174.949471] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32dc8db3-5de6-450c-919f-4d21f8ffb10a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.961018] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Created directory with path [datastore1] devstack-image-cache_base/757c5b7a-e993-4b3d-811b-7c0824b6c981 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1174.961018] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_fbc1667d-0997-4b4d-ba37-19a108c0cb17/OSTACK_IMG_fbc1667d-0997-4b4d-ba37-19a108c0cb17.vmdk to [datastore1] devstack-image-cache_base/757c5b7a-e993-4b3d-811b-7c0824b6c981/757c5b7a-e993-4b3d-811b-7c0824b6c981.vmdk. {{(pid=69994) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1174.961018] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-2cf68868-b251-4a00-ac37-b59c777fc196 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.967501] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1174.967501] env[69994]: value = "task-2926357" [ 1174.967501] env[69994]: _type = "Task" [ 1174.967501] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.976039] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926357, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.005196] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926353, 'name': ReconfigVM_Task, 'duration_secs': 0.641081} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.005911] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Reconfigured VM instance instance-00000065 to attach disk [datastore2] volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4/volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1175.011029] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bacefdb-7966-405c-9fec-cfc0bbe0d54e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.027252] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1175.027252] env[69994]: value = "task-2926358" [ 1175.027252] env[69994]: _type = "Task" [ 1175.027252] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.041079] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926358, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.201215] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 85b58e95-04fd-45ff-ac60-d0167031e148] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1175.309390] env[69994]: DEBUG nova.network.neutron [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Added VIF to instance network info cache for port 1c0f982d-cb97-4c63-b8e5-af47421200c1. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 1175.309857] env[69994]: DEBUG nova.network.neutron [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updating instance_info_cache with network_info: [{"id": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "address": "fa:16:3e:3a:fd:a2", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3723ea-e1", "ovs_interfaceid": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1c0f982d-cb97-4c63-b8e5-af47421200c1", "address": "fa:16:3e:a6:53:3b", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tap1c0f982d-cb", "ovs_interfaceid": "1c0f982d-cb97-4c63-b8e5-af47421200c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1175.318230] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.318533] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.318946] env[69994]: DEBUG nova.network.neutron [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1175.318946] env[69994]: DEBUG nova.objects.instance [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lazy-loading 'info_cache' on Instance uuid 29ea539a-d8f4-487b-b5e7-1f15534272f9 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1175.442078] env[69994]: DEBUG nova.scheduler.client.report [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1175.478880] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926357, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.539039] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926358, 'name': ReconfigVM_Task, 'duration_secs': 0.179043} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.539378] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587638', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'name': 'volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '60f6d502-0fef-4764-8c1f-1b1d5ab3db41', 'attached_at': '', 'detached_at': '', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'serial': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1175.540039] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8b23a20e-e230-4b8d-901a-a8abe7acb667 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.548406] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1175.548406] env[69994]: value = "task-2926359" [ 1175.548406] env[69994]: _type = "Task" [ 1175.548406] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.559190] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926359, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.708230] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 83a6beb7-5e26-4d90-87c3-28e4f8f1e34a] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1175.726384] env[69994]: DEBUG nova.network.neutron [-] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1175.812350] env[69994]: DEBUG oslo_concurrency.lockutils [req-96b21b43-a2fc-4586-99b5-24848dcd0005 req-697f0a02-5d32-48e6-8ba2-fa99e47f3b94 service nova] Releasing lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1175.812888] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.813122] env[69994]: DEBUG nova.network.neutron [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1175.822485] env[69994]: DEBUG nova.objects.base [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Object Instance<29ea539a-d8f4-487b-b5e7-1f15534272f9> lazy-loaded attributes: flavor,info_cache {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1175.947600] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.292s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.950907] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.362s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.952397] env[69994]: INFO nova.compute.claims [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1175.980207] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926357, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.001482] env[69994]: INFO nova.network.neutron [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updating port da352ba6-e52b-4b13-8514-5db1e4d826ee with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1176.061366] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926359, 'name': Rename_Task, 'duration_secs': 0.378287} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.061651] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1176.062012] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d790397-9b02-4e32-89c9-d8ce32330617 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.072846] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1176.072846] env[69994]: value = "task-2926360" [ 1176.072846] env[69994]: _type = "Task" [ 1176.072846] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.084755] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926360, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.212395] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 5b73cd44-6c89-4e12-9195-16b5172cbf2c] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1176.229380] env[69994]: INFO nova.compute.manager [-] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Took 1.49 seconds to deallocate network for instance. [ 1176.364994] env[69994]: WARNING nova.network.neutron [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c already exists in list: networks containing: ['dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c']. 
ignoring it [ 1176.365238] env[69994]: WARNING nova.network.neutron [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c already exists in list: networks containing: ['dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c']. ignoring it [ 1176.365404] env[69994]: WARNING nova.network.neutron [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] 1c0f982d-cb97-4c63-b8e5-af47421200c1 already exists in list: port_ids containing: ['1c0f982d-cb97-4c63-b8e5-af47421200c1']. ignoring it [ 1176.489708] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926357, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.584357] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926360, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.680966] env[69994]: DEBUG nova.compute.manager [req-4793c8ad-720f-4f29-9f3d-680affea7046 req-1c625d78-d53a-4511-adb8-47d7c1a381d6 service nova] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Received event network-vif-deleted-0c8c3a9b-a328-44f8-81e2-5a480901ac9f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1176.712205] env[69994]: DEBUG nova.network.neutron [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance_info_cache with network_info: [{"id": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "address": "fa:16:3e:8a:06:07", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c872b8c-ae", "ovs_interfaceid": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.715911] 
env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: dd196e59-868b-409f-bddb-bb99b0c1092f] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1176.737187] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.806938] env[69994]: DEBUG nova.network.neutron [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updating instance_info_cache with network_info: [{"id": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "address": "fa:16:3e:3a:fd:a2", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3723ea-e1", "ovs_interfaceid": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1c0f982d-cb97-4c63-b8e5-af47421200c1", "address": "fa:16:3e:a6:53:3b", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c0f982d-cb", "ovs_interfaceid": "1c0f982d-cb97-4c63-b8e5-af47421200c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.983355] env[69994]: DEBUG 
oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926357, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.085899] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926360, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.123045] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f765ec-70a4-4f95-8a83-0d0677f73fda {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.131375] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4812bf2f-4409-49a4-8d87-f31795aa4430 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.163387] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2635ac0-d2c6-42c9-b904-f19432f50341 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.172885] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfc9d3c-e81f-4f77-a51c-69c58e3fe7e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.188349] env[69994]: DEBUG nova.compute.provider_tree [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.214626] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.219530] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 489b68f2-c2f2-4710-a06f-45ad8c577441] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1177.310239] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.310933] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.311245] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.312324] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221ce8b9-1fff-420b-9ea7-95a750bb70d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.329745] env[69994]: DEBUG nova.virt.hardware [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1177.329988] env[69994]: DEBUG nova.virt.hardware [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1177.330172] env[69994]: DEBUG nova.virt.hardware [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1177.330359] env[69994]: DEBUG nova.virt.hardware [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1177.330507] env[69994]: DEBUG nova.virt.hardware [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1177.330662] env[69994]: DEBUG nova.virt.hardware [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1177.330924] env[69994]: DEBUG nova.virt.hardware [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1177.331174] env[69994]: DEBUG nova.virt.hardware [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1177.331357] env[69994]: DEBUG nova.virt.hardware [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1177.331523] env[69994]: DEBUG nova.virt.hardware [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1177.331697] env[69994]: DEBUG nova.virt.hardware [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1177.337966] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Reconfiguring VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1177.338290] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-156686e4-ce0c-4b78-bea5-658e2d67dace {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.357120] env[69994]: DEBUG oslo_vmware.api [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1177.357120] env[69994]: value = "task-2926361" [ 1177.357120] env[69994]: _type = "Task" [ 1177.357120] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.369017] env[69994]: DEBUG oslo_vmware.api [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926361, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.481924] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926357, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.380303} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.483493] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_fbc1667d-0997-4b4d-ba37-19a108c0cb17/OSTACK_IMG_fbc1667d-0997-4b4d-ba37-19a108c0cb17.vmdk to [datastore1] devstack-image-cache_base/757c5b7a-e993-4b3d-811b-7c0824b6c981/757c5b7a-e993-4b3d-811b-7c0824b6c981.vmdk. [ 1177.483493] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Cleaning up location [datastore1] OSTACK_IMG_fbc1667d-0997-4b4d-ba37-19a108c0cb17 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1177.483493] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_fbc1667d-0997-4b4d-ba37-19a108c0cb17 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1177.483493] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-522afda3-0db3-4a35-9945-07f06be18a9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.491876] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1177.491876] env[69994]: value = "task-2926362" [ 1177.491876] env[69994]: _type = "Task" [ 1177.491876] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.501790] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926362, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.556459] env[69994]: DEBUG nova.compute.manager [req-96abc2bd-dd35-43b1-801f-28d3a467ae52 req-80dbaaff-2ce6-413c-99a3-bf8dc85bcbfe service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Received event network-vif-plugged-da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1177.556573] env[69994]: DEBUG oslo_concurrency.lockutils [req-96abc2bd-dd35-43b1-801f-28d3a467ae52 req-80dbaaff-2ce6-413c-99a3-bf8dc85bcbfe service nova] Acquiring lock "3c374550-d65b-494a-89d7-60720f6b44dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.556785] env[69994]: DEBUG oslo_concurrency.lockutils [req-96abc2bd-dd35-43b1-801f-28d3a467ae52 req-80dbaaff-2ce6-413c-99a3-bf8dc85bcbfe service nova] Lock "3c374550-d65b-494a-89d7-60720f6b44dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.556961] env[69994]: DEBUG oslo_concurrency.lockutils [req-96abc2bd-dd35-43b1-801f-28d3a467ae52 req-80dbaaff-2ce6-413c-99a3-bf8dc85bcbfe service nova] Lock "3c374550-d65b-494a-89d7-60720f6b44dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.557306] env[69994]: DEBUG nova.compute.manager [req-96abc2bd-dd35-43b1-801f-28d3a467ae52 req-80dbaaff-2ce6-413c-99a3-bf8dc85bcbfe service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] No waiting events found dispatching network-vif-plugged-da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1177.557493] env[69994]: WARNING nova.compute.manager [req-96abc2bd-dd35-43b1-801f-28d3a467ae52 req-80dbaaff-2ce6-413c-99a3-bf8dc85bcbfe service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Received unexpected event network-vif-plugged-da352ba6-e52b-4b13-8514-5db1e4d826ee for instance with vm_state shelved_offloaded and task_state spawning. [ 1177.584701] env[69994]: DEBUG oslo_vmware.api [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926360, 'name': PowerOnVM_Task, 'duration_secs': 1.334867} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.584967] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1177.585195] env[69994]: DEBUG nova.compute.manager [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1177.585958] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc885daa-bf13-4640-9c94-2ad34961f29a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.642281] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.642409] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.642598] env[69994]: DEBUG nova.network.neutron [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1177.691435] env[69994]: DEBUG nova.scheduler.client.report [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1177.721243] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: a7dd4e89-a953-49b4-b56f-fdacef3a621b] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1177.868201] env[69994]: DEBUG oslo_vmware.api [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926361, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.001508] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926362, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.038948} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.001794] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1178.001963] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/757c5b7a-e993-4b3d-811b-7c0824b6c981/757c5b7a-e993-4b3d-811b-7c0824b6c981.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.002231] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/757c5b7a-e993-4b3d-811b-7c0824b6c981/757c5b7a-e993-4b3d-811b-7c0824b6c981.vmdk to [datastore1] 85293c91-f363-4085-9eb8-2bf6514fa2f1/85293c91-f363-4085-9eb8-2bf6514fa2f1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1178.002496] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-888887bc-fc0c-420f-aca5-ec4152bd7eaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.010948] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1178.010948] env[69994]: value = "task-2926363" [ 1178.010948] env[69994]: _type = "Task" [ 1178.010948] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.019609] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926363, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.101707] env[69994]: DEBUG oslo_concurrency.lockutils [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.196648] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.246s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.197280] env[69994]: DEBUG nova.compute.manager [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1178.200520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.357s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.201841] env[69994]: INFO nova.compute.claims [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1178.222026] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1178.222026] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd7078fe-cc36-4342-8c9b-48bc25b66477 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.223890] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: c7c17fab-71a4-44df-907e-f7b408f80236] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1178.233048] env[69994]: DEBUG oslo_vmware.api [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1178.233048] env[69994]: value = "task-2926364" [ 1178.233048] env[69994]: _type = "Task" [ 1178.233048] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.245839] env[69994]: DEBUG oslo_vmware.api [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926364, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.373955] env[69994]: DEBUG oslo_vmware.api [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926361, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.459780] env[69994]: DEBUG nova.network.neutron [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updating instance_info_cache with network_info: [{"id": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "address": "fa:16:3e:dd:e8:98", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda352ba6-e5", "ovs_interfaceid": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.523471] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926363, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.706581] env[69994]: DEBUG nova.compute.utils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1178.710241] env[69994]: DEBUG nova.compute.manager [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1178.710371] env[69994]: DEBUG nova.network.neutron [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1178.727219] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 9dbaceb8-fa4d-40c4-9f0e-fa9749663a05] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1178.746819] env[69994]: DEBUG oslo_vmware.api [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926364, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.751610] env[69994]: DEBUG nova.policy [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de3fba71299348fab70f6e21e1028bb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0f5bb040f474df19739d5170639ff67', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1178.874873] env[69994]: DEBUG oslo_vmware.api [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926361, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.962886] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.993526] env[69994]: DEBUG nova.virt.hardware [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='6e5f52770f90f79564ad7330bc0733b4',container_format='bare',created_at=2025-03-11T12:39:11Z,direct_url=,disk_format='vmdk',id=80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-836329664-shelved',owner='1a10b55bcc104c108604d402ec6d09ce',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-03-11T12:39:27Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1178.993798] env[69994]: DEBUG nova.virt.hardware [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1178.993961] env[69994]: DEBUG nova.virt.hardware [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1178.994257] env[69994]: DEBUG nova.virt.hardware [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1178.994445] env[69994]: DEBUG nova.virt.hardware [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1178.994706] env[69994]: DEBUG nova.virt.hardware [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1178.994953] env[69994]: DEBUG nova.virt.hardware [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1178.995165] env[69994]: DEBUG nova.virt.hardware [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1178.995378] env[69994]: DEBUG nova.virt.hardware [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1178.995580] env[69994]: DEBUG nova.virt.hardware [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1178.995789] env[69994]: DEBUG nova.virt.hardware [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1178.997152] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c1009a7-27d0-423b-94ca-d9314e52ffde {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.008312] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f07d0ae-bade-4f9f-9818-404b055416d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.032036] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926363, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.032686] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:e8:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52c1f5eb-3d4a-4faa-a30d-2b0a46430791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da352ba6-e52b-4b13-8514-5db1e4d826ee', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1179.040807] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1179.041200] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1179.041489] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebefca59-3a5c-4789-bbab-11805e2eb062 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.063417] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1179.063417] env[69994]: value = "task-2926365" [ 1179.063417] env[69994]: _type = "Task" [ 1179.063417] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.073272] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926365, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.205697] env[69994]: DEBUG nova.network.neutron [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Successfully created port: 39e82227-f1d4-4f42-8137-5212b739413c {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1179.210638] env[69994]: DEBUG nova.compute.manager [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1179.230284] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 922799c0-707c-4f4e-a54c-f015eab0a8d7] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1179.250655] env[69994]: DEBUG oslo_vmware.api [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926364, 'name': PowerOnVM_Task, 'duration_secs': 0.718663} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.251023] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1179.251275] env[69994]: DEBUG nova.compute.manager [None req-c5fbf0c3-fc79-4ba4-b846-28a289bae69b tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1179.252177] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1372f701-fc8c-4c45-97d9-c0d4d44edc7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.373636] env[69994]: DEBUG oslo_vmware.api [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926361, 'name': ReconfigVM_Task, 'duration_secs': 1.633579} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.374263] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.375377] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Reconfigured VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1179.396021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a0451d-0807-4c22-bfe1-0d95164fd09f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.405752] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b304c0-c627-4c46-aa4f-e4f1365e1550 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.441937] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0970bc5-64c7-461b-bfc2-c37181da3731 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.451523] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546790cf-4d4a-4ff4-a5fd-d08bc944b922 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.467095] env[69994]: DEBUG nova.compute.provider_tree [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed in 
ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1179.524844] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926363, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.575406] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926365, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.584027] env[69994]: DEBUG nova.compute.manager [req-5d861c59-03f2-413c-aed8-04307e3f96d8 req-ac43d368-52e2-4b72-9615-9a19598dcaae service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Received event network-changed-da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1179.584189] env[69994]: DEBUG nova.compute.manager [req-5d861c59-03f2-413c-aed8-04307e3f96d8 req-ac43d368-52e2-4b72-9615-9a19598dcaae service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Refreshing instance network info cache due to event network-changed-da352ba6-e52b-4b13-8514-5db1e4d826ee. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1179.584412] env[69994]: DEBUG oslo_concurrency.lockutils [req-5d861c59-03f2-413c-aed8-04307e3f96d8 req-ac43d368-52e2-4b72-9615-9a19598dcaae service nova] Acquiring lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.584554] env[69994]: DEBUG oslo_concurrency.lockutils [req-5d861c59-03f2-413c-aed8-04307e3f96d8 req-ac43d368-52e2-4b72-9615-9a19598dcaae service nova] Acquired lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.584716] env[69994]: DEBUG nova.network.neutron [req-5d861c59-03f2-413c-aed8-04307e3f96d8 req-ac43d368-52e2-4b72-9615-9a19598dcaae service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Refreshing network info cache for port da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1179.734667] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 6c81eb8b-78d7-469d-8076-13d8a8f61fec] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1179.880134] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ee80f602-4a88-48b7-96af-34529d94edb4 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-87c5b8e4-166c-44b9-a179-1afaef751434-1c0f982d-cb97-4c63-b8e5-af47421200c1" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.500s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.971313] env[69994]: DEBUG nova.scheduler.client.report [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed for provider 
2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1180.026470] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926363, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.075762] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926365, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.220558] env[69994]: DEBUG nova.compute.manager [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1180.238172] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 4d415c4d-54b2-4324-8e98-9dc476960348] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1180.252485] env[69994]: DEBUG nova.virt.hardware [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1180.252812] env[69994]: DEBUG nova.virt.hardware [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1180.252978] env[69994]: DEBUG nova.virt.hardware [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1180.253312] env[69994]: DEBUG nova.virt.hardware [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 
tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1180.253518] env[69994]: DEBUG nova.virt.hardware [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1180.253684] env[69994]: DEBUG nova.virt.hardware [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1180.254495] env[69994]: DEBUG nova.virt.hardware [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1180.254495] env[69994]: DEBUG nova.virt.hardware [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1180.254495] env[69994]: DEBUG nova.virt.hardware [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1180.254721] env[69994]: DEBUG nova.virt.hardware [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1180.254721] env[69994]: DEBUG nova.virt.hardware [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1180.255674] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3eb22e-6c8d-4793-aab9-33b698754661 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.268577] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a2310d-210f-4ed8-bf1b-9b9320eb45b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.330623] env[69994]: DEBUG nova.network.neutron [req-5d861c59-03f2-413c-aed8-04307e3f96d8 req-ac43d368-52e2-4b72-9615-9a19598dcaae service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updated VIF entry in instance network info cache for port da352ba6-e52b-4b13-8514-5db1e4d826ee. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1180.331118] env[69994]: DEBUG nova.network.neutron [req-5d861c59-03f2-413c-aed8-04307e3f96d8 req-ac43d368-52e2-4b72-9615-9a19598dcaae service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updating instance_info_cache with network_info: [{"id": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "address": "fa:16:3e:dd:e8:98", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda352ba6-e5", "ovs_interfaceid": "da352ba6-e52b-4b13-8514-5db1e4d826ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.477837] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.278s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.478864] env[69994]: DEBUG nova.compute.manager [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1180.481366] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.745s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.481611] env[69994]: DEBUG nova.objects.instance [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'resources' on Instance uuid 68eba44a-0989-47dc-a88b-102d9aa34c5d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1180.528239] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926363, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.579932] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926365, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.743233] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: d95cc7ec-c127-4fa1-bc39-3b5ffc951a2e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1180.819603] env[69994]: DEBUG nova.network.neutron [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Successfully updated port: 39e82227-f1d4-4f42-8137-5212b739413c {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1180.833761] env[69994]: DEBUG oslo_concurrency.lockutils [req-5d861c59-03f2-413c-aed8-04307e3f96d8 req-ac43d368-52e2-4b72-9615-9a19598dcaae service nova] Releasing lock "refresh_cache-3c374550-d65b-494a-89d7-60720f6b44dc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.985183] env[69994]: DEBUG nova.compute.utils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1180.989426] env[69994]: DEBUG nova.compute.manager [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1180.989602] env[69994]: DEBUG nova.network.neutron [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1181.031450] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926363, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.561767} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.032976] env[69994]: DEBUG nova.policy [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7be902c21aba40e1ac159ffa787eea04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d72179a46b64984b9ef219161bfcd76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1181.034460] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/757c5b7a-e993-4b3d-811b-7c0824b6c981/757c5b7a-e993-4b3d-811b-7c0824b6c981.vmdk to [datastore1] 85293c91-f363-4085-9eb8-2bf6514fa2f1/85293c91-f363-4085-9eb8-2bf6514fa2f1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1181.037459] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806ac259-bdc4-4c9b-b12b-47b351fa2f12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.073119] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 85293c91-f363-4085-9eb8-2bf6514fa2f1/85293c91-f363-4085-9eb8-2bf6514fa2f1.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1181.079096] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79b93659-657f-49b2-8a3d-dc3f011b633a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.113136] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926365, 'name': CreateVM_Task, 'duration_secs': 1.640377} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.116664] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1181.117021] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1181.117021] env[69994]: value = "task-2926366" [ 1181.117021] env[69994]: _type = "Task" [ 1181.117021] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.117972] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.118154] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "[datastore1] devstack-image-cache_base/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.118515] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1181.118823] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2dedc37-1650-4c2e-a729-73664f3788e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.127247] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1181.127247] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5291b0af-197c-7d82-2913-de0bf73b487c" [ 1181.127247] env[69994]: _type = "Task" [ 1181.127247] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.132734] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926366, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.144957] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "[datastore1] devstack-image-cache_base/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.144957] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Processing image 80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1181.144957] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.144957] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "[datastore1] devstack-image-cache_base/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.145240] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1181.145427] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad9fbced-e569-4f28-aeb4-fd4847d457e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.158983] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1181.159283] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1181.162775] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3478739e-76d1-48ef-b774-b918ea9cef34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.170551] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1181.170551] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528649c0-550a-bbdb-430e-6e64e639c71a" [ 1181.170551] env[69994]: _type = "Task" [ 1181.170551] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.182339] env[69994]: DEBUG oslo_concurrency.lockutils [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "interface-87c5b8e4-166c-44b9-a179-1afaef751434-1c0f982d-cb97-4c63-b8e5-af47421200c1" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.182592] env[69994]: DEBUG oslo_concurrency.lockutils [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-87c5b8e4-166c-44b9-a179-1afaef751434-1c0f982d-cb97-4c63-b8e5-af47421200c1" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.186654] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528649c0-550a-bbdb-430e-6e64e639c71a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.212134] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a7dec4-f463-43c6-98fe-911cba0ba307 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.221038] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a38551f1-cdce-4176-aee8-4c86e0dd93a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.255771] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 93f7f9dc-318c-41c9-b1eb-4ecd5c66fcd3] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1181.255771] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d58bea7-1faf-4ff5-8aab-288ca1ebece8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.265759] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e975dd-8ce0-4de0-871d-7d5e1212fdb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.290647] env[69994]: DEBUG nova.compute.provider_tree [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.322764] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.322864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.322985] env[69994]: DEBUG nova.network.neutron [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1181.367829] env[69994]: DEBUG nova.network.neutron [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Successfully created port: 795b02e6-feba-409a-ad9f-5932d55da938 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1181.490298] env[69994]: DEBUG nova.compute.manager [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 
tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1181.615971] env[69994]: DEBUG nova.compute.manager [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Received event network-vif-plugged-39e82227-f1d4-4f42-8137-5212b739413c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1181.616197] env[69994]: DEBUG oslo_concurrency.lockutils [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] Acquiring lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.616411] env[69994]: DEBUG oslo_concurrency.lockutils [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.616584] env[69994]: DEBUG oslo_concurrency.lockutils [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.616748] env[69994]: DEBUG nova.compute.manager [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] No waiting events found dispatching network-vif-plugged-39e82227-f1d4-4f42-8137-5212b739413c {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1181.616913] env[69994]: WARNING nova.compute.manager [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Received unexpected event network-vif-plugged-39e82227-f1d4-4f42-8137-5212b739413c for instance with vm_state building and task_state spawning. [ 1181.617106] env[69994]: DEBUG nova.compute.manager [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Received event network-changed-39e82227-f1d4-4f42-8137-5212b739413c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1181.617273] env[69994]: DEBUG nova.compute.manager [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Refreshing instance network info cache due to event network-changed-39e82227-f1d4-4f42-8137-5212b739413c. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1181.617452] env[69994]: DEBUG oslo_concurrency.lockutils [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] Acquiring lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.629829] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926366, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.682162] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1181.682275] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Fetch image to [datastore1] OSTACK_IMG_97fbe77f-59b7-4afb-afa2-b7c613296b9c/OSTACK_IMG_97fbe77f-59b7-4afb-afa2-b7c613296b9c.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1181.682463] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Downloading stream optimized image 80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77 to [datastore1] OSTACK_IMG_97fbe77f-59b7-4afb-afa2-b7c613296b9c/OSTACK_IMG_97fbe77f-59b7-4afb-afa2-b7c613296b9c.vmdk on the data store datastore1 as vApp {{(pid=69994) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1181.682662] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Downloading image file data 80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77 to the ESX as VM named 'OSTACK_IMG_97fbe77f-59b7-4afb-afa2-b7c613296b9c' {{(pid=69994) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1181.687777] env[69994]: DEBUG oslo_concurrency.lockutils [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.687960] env[69994]: DEBUG oslo_concurrency.lockutils [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.688848] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-b4588226-438d-4b03-ad6e-bf3b79f34155 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.710040] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9e57e1-2662-4149-b9c1-2b1f4fe31287 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.737336] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Reconfiguring VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1181.753941] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de6a58a5-2d0b-4e0a-8e62-cba97be8cd1b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.769183] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 686feb53-00e2-43d9-b316-09c089df0891] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1181.778108] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1181.778108] env[69994]: value = "task-2926367" [ 1181.778108] env[69994]: _type = "Task" [ 1181.778108] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.787842] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926367, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.795118] env[69994]: DEBUG nova.scheduler.client.report [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1181.812022] env[69994]: DEBUG oslo_vmware.rw_handles [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1181.812022] env[69994]: value = "resgroup-9" [ 1181.812022] env[69994]: _type = "ResourcePool" [ 1181.812022] env[69994]: }. 
{{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1181.812022] env[69994]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-86dcc047-993e-445a-9178-59cfcfc00343 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.838227] env[69994]: DEBUG oslo_vmware.rw_handles [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lease: (returnval){ [ 1181.838227] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226ce6f-172c-bdcd-91e7-18cf8d4f2764" [ 1181.838227] env[69994]: _type = "HttpNfcLease" [ 1181.838227] env[69994]: } obtained for vApp import into resource pool (val){ [ 1181.838227] env[69994]: value = "resgroup-9" [ 1181.838227] env[69994]: _type = "ResourcePool" [ 1181.838227] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1181.838699] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the lease: (returnval){ [ 1181.838699] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226ce6f-172c-bdcd-91e7-18cf8d4f2764" [ 1181.838699] env[69994]: _type = "HttpNfcLease" [ 1181.838699] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1181.846135] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1181.846135] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226ce6f-172c-bdcd-91e7-18cf8d4f2764" [ 1181.846135] env[69994]: _type = "HttpNfcLease" [ 1181.846135] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1181.859998] env[69994]: DEBUG nova.network.neutron [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1182.016188] env[69994]: DEBUG nova.network.neutron [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance_info_cache with network_info: [{"id": "39e82227-f1d4-4f42-8137-5212b739413c", "address": "fa:16:3e:d7:2f:7d", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39e82227-f1", "ovs_interfaceid": "39e82227-f1d4-4f42-8137-5212b739413c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.129716] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926366, 'name': ReconfigVM_Task, 'duration_secs': 0.766209} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.130009] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 85293c91-f363-4085-9eb8-2bf6514fa2f1/85293c91-f363-4085-9eb8-2bf6514fa2f1.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1182.130641] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2479de59-8e1a-413c-a33a-abcae5645d93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.137816] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1182.137816] env[69994]: value = "task-2926369" [ 1182.137816] env[69994]: _type = "Task" [ 1182.137816] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.146370] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926369, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.272190] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 5b9648a7-f26f-4151-be5c-59991035a529] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1182.288108] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.299689] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.818s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.302040] env[69994]: DEBUG oslo_concurrency.lockutils [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.200s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.302225] env[69994]: DEBUG nova.objects.instance [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1182.322180] env[69994]: INFO nova.scheduler.client.report [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Deleted allocations for instance 68eba44a-0989-47dc-a88b-102d9aa34c5d [ 1182.347667] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1182.347667] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226ce6f-172c-bdcd-91e7-18cf8d4f2764" [ 1182.347667] env[69994]: _type = "HttpNfcLease" [ 1182.347667] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1182.500495] env[69994]: DEBUG nova.compute.manager [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1182.523222] env[69994]: DEBUG nova.virt.hardware [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1182.523482] env[69994]: DEBUG nova.virt.hardware [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1182.523639] env[69994]: DEBUG nova.virt.hardware [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1182.523825] env[69994]: DEBUG nova.virt.hardware [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1182.523973] env[69994]: DEBUG nova.virt.hardware [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1182.524140] env[69994]: DEBUG nova.virt.hardware [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1182.524367] env[69994]: DEBUG nova.virt.hardware [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1182.524514] env[69994]: DEBUG nova.virt.hardware [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1182.524682] 
env[69994]: DEBUG nova.virt.hardware [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1182.524847] env[69994]: DEBUG nova.virt.hardware [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1182.525035] env[69994]: DEBUG nova.virt.hardware [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1182.525509] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.525790] env[69994]: DEBUG nova.compute.manager [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Instance network_info: |[{"id": "39e82227-f1d4-4f42-8137-5212b739413c", "address": "fa:16:3e:d7:2f:7d", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39e82227-f1", "ovs_interfaceid": "39e82227-f1d4-4f42-8137-5212b739413c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1182.526606] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37f7996-5fa0-4386-addf-ea730f7c87d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.529293] env[69994]: DEBUG oslo_concurrency.lockutils [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] Acquired lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.529593] 
env[69994]: DEBUG nova.network.neutron [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Refreshing network info cache for port 39e82227-f1d4-4f42-8137-5212b739413c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1182.530797] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:2f:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39e82227-f1d4-4f42-8137-5212b739413c', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1182.538128] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1182.539091] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1182.539792] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8af182ce-3011-4bcf-b032-e1903ffec373 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.558266] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d042623-7fe9-449f-a683-13a76f48d3c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.565693] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1182.565693] env[69994]: value = "task-2926370" [ 1182.565693] env[69994]: _type = "Task" [ 1182.565693] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.582096] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926370, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.648653] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926369, 'name': Rename_Task, 'duration_secs': 0.260005} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.648959] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1182.649235] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-061c3fd4-3b42-4d34-9c84-0e46c507b106 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.656422] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1182.656422] env[69994]: value = "task-2926371" [ 1182.656422] env[69994]: _type = "Task" [ 1182.656422] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.664962] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926371, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.775465] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 1ff25686-e13e-4003-909b-18bf919aa20c] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1182.791067] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.829066] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12c4181e-05f3-49eb-aee1-832ea5bf9e3b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "68eba44a-0989-47dc-a88b-102d9aa34c5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.738s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.848525] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1182.848525] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226ce6f-172c-bdcd-91e7-18cf8d4f2764" [ 1182.848525] env[69994]: _type = "HttpNfcLease" [ 1182.848525] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1182.848970] env[69994]: DEBUG oslo_vmware.rw_handles [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1182.848970] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5226ce6f-172c-bdcd-91e7-18cf8d4f2764" [ 1182.848970] env[69994]: _type = "HttpNfcLease" [ 1182.848970] env[69994]: }. 
{{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1182.850263] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69acbaf0-c354-427b-a706-2b2050322ef0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.860951] env[69994]: DEBUG oslo_vmware.rw_handles [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52357fa9-efe9-5b1f-9fe5-ff0863e1c1a1/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1182.861176] env[69994]: DEBUG oslo_vmware.rw_handles [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52357fa9-efe9-5b1f-9fe5-ff0863e1c1a1/disk-0.vmdk. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1182.929701] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-263ffa5f-e30d-4342-adaf-26d0497ffde9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.949298] env[69994]: DEBUG nova.network.neutron [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Successfully updated port: 795b02e6-feba-409a-ad9f-5932d55da938 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1183.077315] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926370, 'name': CreateVM_Task, 'duration_secs': 0.319363} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.077594] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1183.078262] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.078441] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.078752] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1183.079043] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-153f8e7d-34dc-4ad3-b218-65a0ba81741d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.084072] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1183.084072] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c23e61-e40c-a1ec-b0b7-52ce0ed9a143" [ 1183.084072] env[69994]: _type = "Task" [ 1183.084072] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.092566] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c23e61-e40c-a1ec-b0b7-52ce0ed9a143, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.172169] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926371, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.279018] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: f946992b-faf2-4580-adcd-806d3b8fd104] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1183.295967] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.311200] env[69994]: DEBUG oslo_concurrency.lockutils [None req-958c9ddc-9cc1-4b1e-b5cd-5d10ca9df1eb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.333953] env[69994]: DEBUG nova.network.neutron [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updated VIF entry in instance network info cache for port 39e82227-f1d4-4f42-8137-5212b739413c. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1183.334412] env[69994]: DEBUG nova.network.neutron [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance_info_cache with network_info: [{"id": "39e82227-f1d4-4f42-8137-5212b739413c", "address": "fa:16:3e:d7:2f:7d", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39e82227-f1", "ovs_interfaceid": "39e82227-f1d4-4f42-8137-5212b739413c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.453261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "refresh_cache-7963eb9f-66a1-417b-928b-3b5cef7847be" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.453261] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired lock "refresh_cache-7963eb9f-66a1-417b-928b-3b5cef7847be" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.453433] env[69994]: DEBUG nova.network.neutron [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1183.597501] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c23e61-e40c-a1ec-b0b7-52ce0ed9a143, 'name': SearchDatastore_Task, 'duration_secs': 0.01164} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.597832] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.598082] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1183.598324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.598467] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.598658] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1183.599286] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00c6cba6-183e-476f-bdf9-025b432aa374 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.608338] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1183.608523] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1183.609291] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2facadb2-2467-47e1-b5b4-94fa999d9797 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.616100] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1183.616100] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529d4255-8313-dc45-8a7e-0dcbab3e593d" [ 1183.616100] env[69994]: _type = "Task" [ 1183.616100] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.623896] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529d4255-8313-dc45-8a7e-0dcbab3e593d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.652745] env[69994]: DEBUG nova.compute.manager [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Received event network-vif-plugged-795b02e6-feba-409a-ad9f-5932d55da938 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1183.652992] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] Acquiring lock "7963eb9f-66a1-417b-928b-3b5cef7847be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.653248] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.653491] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.653630] env[69994]: DEBUG nova.compute.manager [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 
req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] No waiting events found dispatching network-vif-plugged-795b02e6-feba-409a-ad9f-5932d55da938 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1183.655514] env[69994]: WARNING nova.compute.manager [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Received unexpected event network-vif-plugged-795b02e6-feba-409a-ad9f-5932d55da938 for instance with vm_state building and task_state spawning. [ 1183.656484] env[69994]: DEBUG nova.compute.manager [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Received event network-changed-795b02e6-feba-409a-ad9f-5932d55da938 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1183.656691] env[69994]: DEBUG nova.compute.manager [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Refreshing instance network info cache due to event network-changed-795b02e6-feba-409a-ad9f-5932d55da938. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1183.656882] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] Acquiring lock "refresh_cache-7963eb9f-66a1-417b-928b-3b5cef7847be" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.670204] env[69994]: DEBUG oslo_vmware.api [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926371, 'name': PowerOnVM_Task, 'duration_secs': 0.56513} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.670491] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1183.775421] env[69994]: DEBUG nova.compute.manager [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1183.776421] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa9e138-ed1e-4f58-814a-9d62259000a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.787274] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 29326ab7-2b4b-42af-a90c-e86510bcd443] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1183.799300] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.837511] env[69994]: DEBUG oslo_concurrency.lockutils [req-3300553f-d446-4a8b-8322-8da6082f956f req-0ee907fc-cebb-43c7-9b73-967c8c1638fb service nova] Releasing lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.997310] env[69994]: DEBUG nova.network.neutron [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1184.128749] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529d4255-8313-dc45-8a7e-0dcbab3e593d, 'name': SearchDatastore_Task, 'duration_secs': 0.02174} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.129513] env[69994]: DEBUG oslo_vmware.rw_handles [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Completed reading data from the image iterator. 
{{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1184.130130] env[69994]: DEBUG oslo_vmware.rw_handles [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52357fa9-efe9-5b1f-9fe5-ff0863e1c1a1/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1184.131510] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72e1abb-564e-4f5e-be85-53dea27a7797 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.134164] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-553c8cff-9fed-49f9-8fc3-bad311e19704 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.140453] env[69994]: DEBUG oslo_vmware.rw_handles [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52357fa9-efe9-5b1f-9fe5-ff0863e1c1a1/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1184.140646] env[69994]: DEBUG oslo_vmware.rw_handles [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52357fa9-efe9-5b1f-9fe5-ff0863e1c1a1/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1184.141943] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-4b35b4d9-b1dc-4597-92f5-78a019700de7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.143510] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1184.143510] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a9338d-fc80-5c1f-21fc-0cd55974d483" [ 1184.143510] env[69994]: _type = "Task" [ 1184.143510] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.151406] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a9338d-fc80-5c1f-21fc-0cd55974d483, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.242349] env[69994]: DEBUG nova.network.neutron [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Updating instance_info_cache with network_info: [{"id": "795b02e6-feba-409a-ad9f-5932d55da938", "address": "fa:16:3e:7c:ae:c1", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap795b02e6-fe", "ovs_interfaceid": "795b02e6-feba-409a-ad9f-5932d55da938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.292409] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.294954] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 5e142f6e-920a-4f11-abff-13eb5c168660] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1184.303326] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a11a8092-5009-40f1-8f16-25b6c0d0831b tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 23.452s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.367230] env[69994]: DEBUG oslo_vmware.rw_handles [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52357fa9-efe9-5b1f-9fe5-ff0863e1c1a1/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1184.367470] env[69994]: INFO nova.virt.vmwareapi.images [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Downloaded image file data 80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77 [ 1184.368642] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ef49f4-465c-4f18-9e61-06c40046ab8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.384217] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95e026e7-0643-4a20-bcad-566cb04a9e08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.426490] env[69994]: INFO nova.virt.vmwareapi.images [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] The imported VM was unregistered [ 1184.429560] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1184.429817] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating directory with path [datastore1] devstack-image-cache_base/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1184.430114] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-825f0a13-ab84-4057-80c6-682ac9ec68cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.440866] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Created directory with path [datastore1] devstack-image-cache_base/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1184.441074] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_97fbe77f-59b7-4afb-afa2-b7c613296b9c/OSTACK_IMG_97fbe77f-59b7-4afb-afa2-b7c613296b9c.vmdk to [datastore1] devstack-image-cache_base/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77.vmdk. 
{{(pid=69994) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1184.441340] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-be47879a-d84e-4941-8dab-041dd317467a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.447966] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1184.447966] env[69994]: value = "task-2926373" [ 1184.447966] env[69994]: _type = "Task" [ 1184.447966] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.455657] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926373, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.653801] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a9338d-fc80-5c1f-21fc-0cd55974d483, 'name': SearchDatastore_Task, 'duration_secs': 0.010744} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.654235] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.654517] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] fe716314-1b5d-4b05-b34d-dfd444ed0c8d/fe716314-1b5d-4b05-b34d-dfd444ed0c8d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1184.654672] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57bdcab1-6304-4a5e-80cd-7e981fd20de0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.660991] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1184.660991] env[69994]: value = "task-2926374" [ 1184.660991] env[69994]: _type = "Task" [ 1184.660991] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.669220] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926374, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.745909] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Releasing lock "refresh_cache-7963eb9f-66a1-417b-928b-3b5cef7847be" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.746454] env[69994]: DEBUG nova.compute.manager [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Instance network_info: |[{"id": "795b02e6-feba-409a-ad9f-5932d55da938", "address": "fa:16:3e:7c:ae:c1", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap795b02e6-fe", "ovs_interfaceid": "795b02e6-feba-409a-ad9f-5932d55da938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1184.746886] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] Acquired lock "refresh_cache-7963eb9f-66a1-417b-928b-3b5cef7847be" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.747133] env[69994]: DEBUG nova.network.neutron [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Refreshing network info cache for port 795b02e6-feba-409a-ad9f-5932d55da938 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1184.748827] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:ae:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53915f38-d7a0-42ec-8b30-1eacfb2cc379', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '795b02e6-feba-409a-ad9f-5932d55da938', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1184.758588] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1184.762209] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1184.762824] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0cfaeae5-bcdb-463a-bd64-3918c0a94b6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.788545] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1184.788545] env[69994]: value = "task-2926375" [ 1184.788545] env[69994]: _type = "Task" [ 1184.788545] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.796754] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.802844] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: d1875a97-9eba-47be-a76d-6088cb13412b] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1184.804933] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926375, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.959134] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926373, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.075942] env[69994]: DEBUG nova.network.neutron [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Updated VIF entry in instance network info cache for port 795b02e6-feba-409a-ad9f-5932d55da938. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1185.076505] env[69994]: DEBUG nova.network.neutron [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Updating instance_info_cache with network_info: [{"id": "795b02e6-feba-409a-ad9f-5932d55da938", "address": "fa:16:3e:7c:ae:c1", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap795b02e6-fe", "ovs_interfaceid": "795b02e6-feba-409a-ad9f-5932d55da938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.174418] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926374, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.296411] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.302240] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926375, 'name': CreateVM_Task, 'duration_secs': 0.362456} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.302472] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1185.303204] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.303366] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.303691] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1185.304138] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77560945-7690-4df4-a3fe-4a2cbc88cb23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.306163] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: c98308b3-2431-4f17-9022-bcd9f1e83a35] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1185.311946] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1185.311946] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52fea0ad-1013-9a22-3bbc-e3ca97c884a5" [ 1185.311946] env[69994]: _type = "Task" [ 1185.311946] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.322828] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52fea0ad-1013-9a22-3bbc-e3ca97c884a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.459714] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926373, 'name': MoveVirtualDisk_Task} progress is 38%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.583096] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f3d4299-f80b-4f9d-bfd2-d07ef32b9c29 req-cfae43e9-8457-4a24-922b-370277f86c84 service nova] Releasing lock "refresh_cache-7963eb9f-66a1-417b-928b-3b5cef7847be" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.673291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.674209] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.678611] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926374, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.793741] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.810063] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 76dbf172-10b2-4439-9d2a-8226ba46062d] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1185.822168] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52fea0ad-1013-9a22-3bbc-e3ca97c884a5, 'name': SearchDatastore_Task, 'duration_secs': 0.089495} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.822500] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.822722] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1185.822967] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.823146] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.823343] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1185.823619] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d51f089-7b5d-41f8-b4d9-a44418c58aa8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.840033] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1185.840262] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1185.841745] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e030ca83-e03f-4a60-b08b-4c719ddf3a23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.847602] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1185.847602] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521cf1be-5674-afd3-3d9a-4c40d2e21fbc" [ 1185.847602] env[69994]: _type = "Task" [ 1185.847602] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.856115] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521cf1be-5674-afd3-3d9a-4c40d2e21fbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.955469] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f4e738-aeb1-48a9-8964-10492dae2d80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.963318] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926373, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.965016] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b12a1748-50c8-4dda-bc19-18aebac5366a tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Suspending the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1185.965286] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-2d6a5560-5cb2-436c-9bca-404c4331db51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.971702] env[69994]: DEBUG oslo_vmware.api [None req-b12a1748-50c8-4dda-bc19-18aebac5366a tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1185.971702] env[69994]: value = "task-2926376" [ 1185.971702] env[69994]: _type = "Task" [ 1185.971702] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.979998] env[69994]: DEBUG oslo_vmware.api [None req-b12a1748-50c8-4dda-bc19-18aebac5366a tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926376, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.173470] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926374, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.180162] env[69994]: DEBUG nova.compute.manager [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1186.296405] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.316196] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: e1c00159-d198-4858-b5a3-aa05152b1fda] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1186.358437] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521cf1be-5674-afd3-3d9a-4c40d2e21fbc, 'name': SearchDatastore_Task, 'duration_secs': 0.084245} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.359362] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4f4c8ea-02db-4fe1-86ba-dbe7a2c4b5c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.365216] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1186.365216] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520570a5-f91c-c2d5-0884-f6be0a34c1a1" [ 1186.365216] env[69994]: _type = "Task" [ 1186.365216] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.373709] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520570a5-f91c-c2d5-0884-f6be0a34c1a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.461446] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926373, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.481805] env[69994]: DEBUG oslo_vmware.api [None req-b12a1748-50c8-4dda-bc19-18aebac5366a tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926376, 'name': SuspendVM_Task} progress is 12%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.674722] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926374, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.706493] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.706797] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.708593] env[69994]: INFO nova.compute.claims [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1186.795073] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.819500] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: e03bc64f-70e9-4097-a1e1-ebf8f86508ed] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1186.876863] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520570a5-f91c-c2d5-0884-f6be0a34c1a1, 'name': SearchDatastore_Task, 'duration_secs': 0.119635} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.877246] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.877560] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 7963eb9f-66a1-417b-928b-3b5cef7847be/7963eb9f-66a1-417b-928b-3b5cef7847be.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1186.877867] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce8be2a7-37a2-40dd-abde-f14cf77b69d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.886175] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1186.886175] env[69994]: value = "task-2926377" [ 1186.886175] env[69994]: _type = "Task" [ 1186.886175] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.895359] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926377, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.962400] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926373, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.983103] env[69994]: DEBUG oslo_vmware.api [None req-b12a1748-50c8-4dda-bc19-18aebac5366a tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926376, 'name': SuspendVM_Task} progress is 70%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.175350] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926374, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.355809} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.175682] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] fe716314-1b5d-4b05-b34d-dfd444ed0c8d/fe716314-1b5d-4b05-b34d-dfd444ed0c8d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1187.175907] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1187.176209] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab5b6f2e-c12a-4022-9f4f-01220c4a9168 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.183823] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1187.183823] env[69994]: value = "task-2926378" [ 1187.183823] env[69994]: _type = "Task" [ 1187.183823] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.192156] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926378, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.295619] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926367, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.323459] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: f2ae08e9-fbf3-49ab-8290-75f8a53d6030] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1187.396572] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926377, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.463108] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926373, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.646558} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.463440] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_97fbe77f-59b7-4afb-afa2-b7c613296b9c/OSTACK_IMG_97fbe77f-59b7-4afb-afa2-b7c613296b9c.vmdk to [datastore1] devstack-image-cache_base/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77.vmdk. [ 1187.463638] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Cleaning up location [datastore1] OSTACK_IMG_97fbe77f-59b7-4afb-afa2-b7c613296b9c {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1187.463893] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_97fbe77f-59b7-4afb-afa2-b7c613296b9c {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1187.464310] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3eba0c27-e860-4c31-aa7c-29e035903686 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.471150] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1187.471150] env[69994]: value = "task-2926379" [ 1187.471150] env[69994]: _type = "Task" [ 1187.471150] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.486022] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926379, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.489176] env[69994]: DEBUG oslo_vmware.api [None req-b12a1748-50c8-4dda-bc19-18aebac5366a tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926376, 'name': SuspendVM_Task} progress is 70%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.695936] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926378, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064094} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.696325] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1187.697226] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb72e7b4-c01b-4040-84df-875d90e9612b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.722100] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] fe716314-1b5d-4b05-b34d-dfd444ed0c8d/fe716314-1b5d-4b05-b34d-dfd444ed0c8d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1187.725335] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a56e11e-83ea-4564-b459-f883dfbb8346 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.746650] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1187.746650] env[69994]: value = "task-2926380" [ 1187.746650] env[69994]: _type = "Task" [ 1187.746650] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.755635] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926380, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.794981] env[69994]: DEBUG oslo_vmware.api [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926367, 'name': ReconfigVM_Task, 'duration_secs': 5.754163} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.797451] env[69994]: DEBUG oslo_concurrency.lockutils [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.797704] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Reconfigured VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1187.826870] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: ee68a538-d803-4bd6-9117-b021b28da899] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1187.879098] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfc3620-3c79-4a5a-8c97-22cfada34f64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.886546] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c08c77-a727-489b-99a7-a5392ffb19ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.897075] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926377, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.904539} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.923293] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 7963eb9f-66a1-417b-928b-3b5cef7847be/7963eb9f-66a1-417b-928b-3b5cef7847be.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1187.923532] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1187.924282] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ff9324b-97ff-4a07-8aed-ffaca76a3848 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.926531] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa8e8f0-a665-4744-8fe1-cac048e0678f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.935415] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2114b6ab-4a79-4c79-81a7-fb80c6ae5848 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.940013] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1187.940013] env[69994]: value = "task-2926381" [ 1187.940013] env[69994]: _type = "Task" [ 1187.940013] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.951409] env[69994]: DEBUG nova.compute.provider_tree [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.960024] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926381, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.984081] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926379, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176925} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.986958] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1187.987148] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "[datastore1] devstack-image-cache_base/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.987394] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77.vmdk to [datastore1] 3c374550-d65b-494a-89d7-60720f6b44dc/3c374550-d65b-494a-89d7-60720f6b44dc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1187.987651] env[69994]: DEBUG oslo_vmware.api [None req-b12a1748-50c8-4dda-bc19-18aebac5366a tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926376, 'name': SuspendVM_Task, 'duration_secs': 1.636584} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.987842] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c169fa6-56ae-499f-a2d9-de2333871e8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.989784] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b12a1748-50c8-4dda-bc19-18aebac5366a tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Suspended the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1187.989956] env[69994]: DEBUG nova.compute.manager [None req-b12a1748-50c8-4dda-bc19-18aebac5366a tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1187.991102] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26cdf0e1-bd5e-4b48-b726-cb6dbff91556 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.000167] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1188.000167] env[69994]: value = "task-2926382" [ 1188.000167] env[69994]: _type = "Task" [ 1188.000167] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.008036] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926382, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.256663] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.331989] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 1e19dc4d-c3dd-41e7-819f-30d54cb1390e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1188.452197] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926381, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06138} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.452561] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1188.453568] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcb69e3-b15f-43b2-9e44-b416064ac55a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.457092] env[69994]: DEBUG nova.scheduler.client.report [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1188.482912] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 7963eb9f-66a1-417b-928b-3b5cef7847be/7963eb9f-66a1-417b-928b-3b5cef7847be.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.483810] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-959b1e3f-6caf-4e62-9f5e-53561e04e75a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.510588] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1188.510588] env[69994]: value = "task-2926383" [ 1188.510588] env[69994]: _type = "Task" [ 1188.510588] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.518225] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926382, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.525182] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926383, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.757765] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.836284] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 660277f8-a7ff-43a9-8068-15e3db5a1069] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1188.964752] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.257s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.964752] env[69994]: DEBUG nova.compute.manager [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1189.015251] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926382, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.025649] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926383, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.058398] env[69994]: DEBUG nova.compute.manager [req-9d640b9d-dccc-4014-a1f0-aff81f65083f req-ca16dcbc-6458-4ec0-8859-e86afa4f9afd service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Received event network-changed-be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1189.058398] env[69994]: DEBUG nova.compute.manager [req-9d640b9d-dccc-4014-a1f0-aff81f65083f req-ca16dcbc-6458-4ec0-8859-e86afa4f9afd service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Refreshing instance network info cache due to event network-changed-be3723ea-e18d-4908-bb9b-d8bbce5d3cee. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1189.058398] env[69994]: DEBUG oslo_concurrency.lockutils [req-9d640b9d-dccc-4014-a1f0-aff81f65083f req-ca16dcbc-6458-4ec0-8859-e86afa4f9afd service nova] Acquiring lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.058398] env[69994]: DEBUG oslo_concurrency.lockutils [req-9d640b9d-dccc-4014-a1f0-aff81f65083f req-ca16dcbc-6458-4ec0-8859-e86afa4f9afd service nova] Acquired lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.058398] env[69994]: DEBUG nova.network.neutron [req-9d640b9d-dccc-4014-a1f0-aff81f65083f req-ca16dcbc-6458-4ec0-8859-e86afa4f9afd service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Refreshing network info cache for port be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1189.067080] env[69994]: DEBUG oslo_concurrency.lockutils [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.258270] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926380, 'name': ReconfigVM_Task, 'duration_secs': 1.329903} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.258575] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Reconfigured VM instance instance-0000006f to attach disk [datastore1] fe716314-1b5d-4b05-b34d-dfd444ed0c8d/fe716314-1b5d-4b05-b34d-dfd444ed0c8d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.259308] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c74099d3-9161-4e7d-ad2e-56991f558096 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.266214] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1189.266214] env[69994]: value = "task-2926384" [ 1189.266214] env[69994]: _type = "Task" [ 1189.266214] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.277246] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926384, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.340551] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 15595947-b944-4c82-90ae-883ed951c909] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1189.471643] env[69994]: DEBUG nova.compute.utils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1189.473358] env[69994]: DEBUG nova.compute.manager [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1189.473512] env[69994]: DEBUG nova.network.neutron [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1189.511282] env[69994]: DEBUG nova.policy [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e347ed38e9174950b600fb3f5a9ad65a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e63c47302d14d849b239a91580a25ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1189.518751] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926382, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.527644] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926383, 'name': ReconfigVM_Task, 'duration_secs': 0.595918} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.528326] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 7963eb9f-66a1-417b-928b-3b5cef7847be/7963eb9f-66a1-417b-928b-3b5cef7847be.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.528746] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1db4f518-1fcc-4af7-af5d-1efaf59b2700 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.535799] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1189.535799] env[69994]: value = "task-2926385" [ 1189.535799] env[69994]: _type = "Task" [ 1189.535799] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.546525] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926385, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.576571] env[69994]: INFO nova.compute.manager [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Resuming [ 1189.577413] env[69994]: DEBUG nova.objects.instance [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lazy-loading 'flavor' on Instance uuid 85293c91-f363-4085-9eb8-2bf6514fa2f1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1189.775720] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926384, 'name': Rename_Task, 'duration_secs': 0.158342} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.775914] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1189.776210] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34cc812d-d2de-49ac-bfc6-17fc52b4a4dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.783194] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1189.783194] env[69994]: value = "task-2926386" [ 1189.783194] env[69994]: _type = "Task" [ 1189.783194] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.792124] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926386, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.799872] env[69994]: DEBUG oslo_concurrency.lockutils [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "interface-395a4d39-29ae-4443-949f-4737e7e2341e-1c0f982d-cb97-4c63-b8e5-af47421200c1" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.800485] env[69994]: DEBUG oslo_concurrency.lockutils [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-395a4d39-29ae-4443-949f-4737e7e2341e-1c0f982d-cb97-4c63-b8e5-af47421200c1" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.800947] env[69994]: DEBUG nova.objects.instance [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'flavor' on Instance uuid 395a4d39-29ae-4443-949f-4737e7e2341e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1189.843611] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 83cef95b-99a5-4e6e-8258-79b380b595b3] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1189.850392] env[69994]: DEBUG nova.network.neutron [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Successfully created port: e4d88c42-18f3-404a-8d4a-68852d25e55f {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1189.872398] env[69994]: DEBUG nova.network.neutron [req-9d640b9d-dccc-4014-a1f0-aff81f65083f req-ca16dcbc-6458-4ec0-8859-e86afa4f9afd service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updated VIF entry in instance network info cache for port be3723ea-e18d-4908-bb9b-d8bbce5d3cee. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1189.872878] env[69994]: DEBUG nova.network.neutron [req-9d640b9d-dccc-4014-a1f0-aff81f65083f req-ca16dcbc-6458-4ec0-8859-e86afa4f9afd service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updating instance_info_cache with network_info: [{"id": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "address": "fa:16:3e:3a:fd:a2", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3723ea-e1", "ovs_interfaceid": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1c0f982d-cb97-4c63-b8e5-af47421200c1", "address": "fa:16:3e:a6:53:3b", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c0f982d-cb", "ovs_interfaceid": "1c0f982d-cb97-4c63-b8e5-af47421200c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.977294] env[69994]: DEBUG nova.compute.manager [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1190.017973] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926382, 'name': CopyVirtualDisk_Task} progress is 80%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.046060] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926385, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.294028] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926386, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.349960] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 93087ec4-1d88-47cc-b1d2-0f1697556eae] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1190.376782] env[69994]: DEBUG oslo_concurrency.lockutils [req-9d640b9d-dccc-4014-a1f0-aff81f65083f req-ca16dcbc-6458-4ec0-8859-e86afa4f9afd service nova] Releasing lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.377263] env[69994]: DEBUG oslo_concurrency.lockutils [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1190.378092] env[69994]: DEBUG nova.network.neutron [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1190.397093] env[69994]: DEBUG nova.objects.instance [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'pci_requests' on Instance uuid 395a4d39-29ae-4443-949f-4737e7e2341e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.515691] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926382, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.43425} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.516762] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77/80c4f886-b3ef-41c1-bbd5-1e1ead1c8d77.vmdk to [datastore1] 3c374550-d65b-494a-89d7-60720f6b44dc/3c374550-d65b-494a-89d7-60720f6b44dc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1190.517580] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1ab56d-0d19-4bd1-bb1f-71c11c974d82 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.539604] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 3c374550-d65b-494a-89d7-60720f6b44dc/3c374550-d65b-494a-89d7-60720f6b44dc.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1190.540118] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7269b38-7749-46f7-b33f-08b883dab92c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.561363] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926385, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.562513] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1190.562513] env[69994]: value = "task-2926387" [ 1190.562513] env[69994]: _type = "Task" [ 1190.562513] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.570780] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926387, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.795101] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926386, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.852844] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: d28a6129-1bfe-40da-bc91-c68cf874aa36] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1190.900054] env[69994]: DEBUG nova.objects.base [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Object Instance<395a4d39-29ae-4443-949f-4737e7e2341e> lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1190.900054] env[69994]: DEBUG nova.network.neutron [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1190.986779] env[69994]: DEBUG nova.compute.manager [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1191.014193] env[69994]: DEBUG nova.virt.hardware [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1191.014547] env[69994]: DEBUG nova.virt.hardware [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1191.014816] env[69994]: DEBUG nova.virt.hardware [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1191.015088] env[69994]: DEBUG nova.virt.hardware [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1191.015305] env[69994]: DEBUG nova.virt.hardware [None 
req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1191.015513] env[69994]: DEBUG nova.virt.hardware [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1191.015783] env[69994]: DEBUG nova.virt.hardware [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1191.016064] env[69994]: DEBUG nova.virt.hardware [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1191.016452] env[69994]: DEBUG nova.virt.hardware [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1191.016728] env[69994]: DEBUG nova.virt.hardware [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1191.016984] env[69994]: DEBUG nova.virt.hardware [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1191.018415] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791daad2-635f-460c-b457-c4bd5f857ec0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.027476] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ce6130-e4c5-4ac6-ae3f-d26bbeba1e49 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.052306] env[69994]: DEBUG nova.policy [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9c7ff94bd744305a13df72dbf967c11', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66d57a69e0924b9abc2cc4e67fc8173c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 
'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1191.064331] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926385, 'name': Rename_Task, 'duration_secs': 1.17345} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.067510] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1191.067761] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ffae4eb1-5829-48ef-a8fd-c3154682bd5c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.073544] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926387, 'name': ReconfigVM_Task, 'duration_secs': 0.317411} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.074695] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 3c374550-d65b-494a-89d7-60720f6b44dc/3c374550-d65b-494a-89d7-60720f6b44dc.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1191.075834] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'encryption_format': None, 'boot_index': 0, 'device_type': 'disk', 'encrypted': False, 'disk_bus': None, 'guest_format': None, 'encryption_options': None, 'encryption_secret_uuid': None, 'size': 0, 'image_id': 'f75f967d-5bd8-4c15-9a52-96f7e9dd9d48'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587642', 'volume_id': 'a387ddfa-4996-4758-be71-d088f121096d', 'name': 'volume-a387ddfa-4996-4758-be71-d088f121096d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '3c374550-d65b-494a-89d7-60720f6b44dc', 'attached_at': '', 'detached_at': '', 'volume_id': 'a387ddfa-4996-4758-be71-d088f121096d', 'serial': 'a387ddfa-4996-4758-be71-d088f121096d'}, 'attachment_id': 'eeb53b60-d90b-432a-969c-557288dd34c7', 'device_type': None, 'mount_device': '/dev/sdb', 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69994) spawn 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1191.076052] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Volume attach. Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1191.076251] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587642', 'volume_id': 'a387ddfa-4996-4758-be71-d088f121096d', 'name': 'volume-a387ddfa-4996-4758-be71-d088f121096d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '3c374550-d65b-494a-89d7-60720f6b44dc', 'attached_at': '', 'detached_at': '', 'volume_id': 'a387ddfa-4996-4758-be71-d088f121096d', 'serial': 'a387ddfa-4996-4758-be71-d088f121096d'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1191.076561] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1191.076561] env[69994]: value = "task-2926388" [ 1191.076561] env[69994]: _type = "Task" [ 1191.076561] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.077244] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ca032c-0704-4304-a518-1b25c88eb74f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.097710] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.097927] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquired lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.098087] env[69994]: DEBUG nova.network.neutron [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1191.103823] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52bfe5f0-3a5b-4c2b-b2d4-548f7da0f4bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.106804] env[69994]: DEBUG oslo_vmware.api [None 
req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926388, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.133498] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] volume-a387ddfa-4996-4758-be71-d088f121096d/volume-a387ddfa-4996-4758-be71-d088f121096d.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1191.137668] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbf63f01-9329-4488-91e5-b38060eab5ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.156921] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1191.156921] env[69994]: value = "task-2926389" [ 1191.156921] env[69994]: _type = "Task" [ 1191.156921] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.166173] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926389, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.204491] env[69994]: DEBUG nova.compute.manager [req-db418e76-a1c8-4f68-9a5e-ab945a9dd0e1 req-fc30a8d7-4812-4e53-a9c6-f187002a12e4 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Received event network-changed-7608b3ed-dbf1-48c0-a088-071f08980220 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1191.204703] env[69994]: DEBUG nova.compute.manager [req-db418e76-a1c8-4f68-9a5e-ab945a9dd0e1 req-fc30a8d7-4812-4e53-a9c6-f187002a12e4 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Refreshing instance network info cache due to event network-changed-7608b3ed-dbf1-48c0-a088-071f08980220. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1191.204917] env[69994]: DEBUG oslo_concurrency.lockutils [req-db418e76-a1c8-4f68-9a5e-ab945a9dd0e1 req-fc30a8d7-4812-4e53-a9c6-f187002a12e4 service nova] Acquiring lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.205065] env[69994]: DEBUG oslo_concurrency.lockutils [req-db418e76-a1c8-4f68-9a5e-ab945a9dd0e1 req-fc30a8d7-4812-4e53-a9c6-f187002a12e4 service nova] Acquired lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.205227] env[69994]: DEBUG nova.network.neutron [req-db418e76-a1c8-4f68-9a5e-ab945a9dd0e1 req-fc30a8d7-4812-4e53-a9c6-f187002a12e4 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Refreshing network info cache for port 7608b3ed-dbf1-48c0-a088-071f08980220 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1191.294991] env[69994]: DEBUG oslo_vmware.api [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926386, 'name': PowerOnVM_Task, 'duration_secs': 1.429531} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.295281] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1191.295505] env[69994]: INFO nova.compute.manager [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Took 11.07 seconds to spawn the instance on the hypervisor. [ 1191.295769] env[69994]: DEBUG nova.compute.manager [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1191.296520] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd13cc2d-6e11-4bb2-8788-0a2fb08dd6e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.319216] env[69994]: INFO nova.network.neutron [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Port 1c0f982d-cb97-4c63-b8e5-af47421200c1 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1191.319526] env[69994]: DEBUG nova.network.neutron [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updating instance_info_cache with network_info: [{"id": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "address": "fa:16:3e:3a:fd:a2", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe3723ea-e1", "ovs_interfaceid": "be3723ea-e18d-4908-bb9b-d8bbce5d3cee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.356215] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 43119e21-5226-482c-b640-33e73051a563] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1191.401146] env[69994]: DEBUG nova.network.neutron [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Updating instance_info_cache with network_info: [{"id": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "address": "fa:16:3e:96:f0:2c", "network": {"id": "ca055ef0-8a45-4457-a25c-226ccd592aa9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1472062423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0af2d3f09d264d4c9bba8747f74383bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcfa2bab-5c", "ovs_interfaceid": "dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.429354] env[69994]: DEBUG nova.compute.manager [req-7a2ab8bd-0aab-45e6-bea3-4d7b9ab86af7 
req-9c7da501-79f8-4ee3-90f3-e82cc1174605 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Received event network-vif-plugged-e4d88c42-18f3-404a-8d4a-68852d25e55f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1191.429586] env[69994]: DEBUG oslo_concurrency.lockutils [req-7a2ab8bd-0aab-45e6-bea3-4d7b9ab86af7 req-9c7da501-79f8-4ee3-90f3-e82cc1174605 service nova] Acquiring lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.429896] env[69994]: DEBUG oslo_concurrency.lockutils [req-7a2ab8bd-0aab-45e6-bea3-4d7b9ab86af7 req-9c7da501-79f8-4ee3-90f3-e82cc1174605 service nova] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.429953] env[69994]: DEBUG oslo_concurrency.lockutils [req-7a2ab8bd-0aab-45e6-bea3-4d7b9ab86af7 req-9c7da501-79f8-4ee3-90f3-e82cc1174605 service nova] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.430134] env[69994]: DEBUG nova.compute.manager [req-7a2ab8bd-0aab-45e6-bea3-4d7b9ab86af7 req-9c7da501-79f8-4ee3-90f3-e82cc1174605 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] No waiting events found dispatching network-vif-plugged-e4d88c42-18f3-404a-8d4a-68852d25e55f {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1191.430298] env[69994]: WARNING nova.compute.manager [req-7a2ab8bd-0aab-45e6-bea3-4d7b9ab86af7 req-9c7da501-79f8-4ee3-90f3-e82cc1174605 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Received unexpected event network-vif-plugged-e4d88c42-18f3-404a-8d4a-68852d25e55f for instance with vm_state building and task_state spawning. [ 1191.505532] env[69994]: DEBUG nova.network.neutron [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Successfully updated port: e4d88c42-18f3-404a-8d4a-68852d25e55f {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1191.589751] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926388, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.667975] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926389, 'name': ReconfigVM_Task, 'duration_secs': 0.376442} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.668241] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Reconfigured VM instance instance-0000005e to attach disk [datastore2] volume-a387ddfa-4996-4758-be71-d088f121096d/volume-a387ddfa-4996-4758-be71-d088f121096d.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1191.673450] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75b78a98-ef6a-41e1-b008-73cc8ca6c3d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.688082] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1191.688082] env[69994]: value = "task-2926390" [ 1191.688082] env[69994]: _type = "Task" [ 1191.688082] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.697814] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926390, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.817684] env[69994]: INFO nova.compute.manager [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Took 25.25 seconds to build instance. 
[ 1191.822611] env[69994]: DEBUG oslo_concurrency.lockutils [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "refresh_cache-87c5b8e4-166c-44b9-a179-1afaef751434" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1191.859038] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: f0b77732-aae1-4790-a2c7-75586e78eda6] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1191.904872] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Releasing lock "refresh_cache-85293c91-f363-4085-9eb8-2bf6514fa2f1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1191.905828] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e583f6d-fe35-4ec8-b074-3032d5346348 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.913010] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Resuming the VM {{(pid=69994) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1191.913267] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d91ff625-8853-450d-abd1-d7464e4fef5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.919261] env[69994]: DEBUG oslo_vmware.api [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1191.919261] env[69994]: value = "task-2926391" [ 1191.919261] env[69994]: _type = "Task" [ 1191.919261] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.927155] env[69994]: DEBUG oslo_vmware.api [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926391, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.938861] env[69994]: DEBUG nova.network.neutron [req-db418e76-a1c8-4f68-9a5e-ab945a9dd0e1 req-fc30a8d7-4812-4e53-a9c6-f187002a12e4 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updated VIF entry in instance network info cache for port 7608b3ed-dbf1-48c0-a088-071f08980220. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1191.939263] env[69994]: DEBUG nova.network.neutron [req-db418e76-a1c8-4f68-9a5e-ab945a9dd0e1 req-fc30a8d7-4812-4e53-a9c6-f187002a12e4 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updating instance_info_cache with network_info: [{"id": "7608b3ed-dbf1-48c0-a088-071f08980220", "address": "fa:16:3e:78:4f:1d", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7608b3ed-db", "ovs_interfaceid": "7608b3ed-dbf1-48c0-a088-071f08980220", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.007738] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "refresh_cache-1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.007930] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquired lock "refresh_cache-1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.008100] env[69994]: DEBUG nova.network.neutron [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1192.091341] env[69994]: DEBUG oslo_vmware.api [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926388, 'name': PowerOnVM_Task, 'duration_secs': 0.816611} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.091625] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1192.091847] env[69994]: INFO nova.compute.manager [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Took 9.59 seconds to spawn the instance on the hypervisor. [ 1192.092058] env[69994]: DEBUG nova.compute.manager [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1192.092865] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b4042c-7dd3-466a-bdd3-887b654b87c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.198686] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926390, 'name': ReconfigVM_Task, 'duration_secs': 0.151785} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.199067] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587642', 'volume_id': 'a387ddfa-4996-4758-be71-d088f121096d', 'name': 'volume-a387ddfa-4996-4758-be71-d088f121096d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '3c374550-d65b-494a-89d7-60720f6b44dc', 'attached_at': '', 'detached_at': '', 'volume_id': 'a387ddfa-4996-4758-be71-d088f121096d', 'serial': 'a387ddfa-4996-4758-be71-d088f121096d'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1192.199971] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-255a786a-eb00-4926-9632-b5738511dc1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.206851] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1192.206851] env[69994]: value = "task-2926392" [ 1192.206851] env[69994]: _type = "Task" [ 1192.206851] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.215651] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926392, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.320362] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f99f4e6d-4768-4394-b55d-90097a1bd491 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.759s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.327698] env[69994]: DEBUG oslo_concurrency.lockutils [None req-372b2442-a679-4d2b-bef0-1ce69b4d2b9c tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-87c5b8e4-166c-44b9-a179-1afaef751434-1c0f982d-cb97-4c63-b8e5-af47421200c1" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 11.145s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.362599] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 767ecd3d-631d-43b5-8ebf-28b6cb2077e9] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1192.433440] env[69994]: DEBUG oslo_vmware.api [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926391, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.442478] env[69994]: DEBUG oslo_concurrency.lockutils [req-db418e76-a1c8-4f68-9a5e-ab945a9dd0e1 req-fc30a8d7-4812-4e53-a9c6-f187002a12e4 service nova] Releasing lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.545436] env[69994]: DEBUG nova.network.neutron [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1192.613849] env[69994]: INFO nova.compute.manager [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Took 22.88 seconds to build instance. 
[ 1192.703598] env[69994]: DEBUG nova.network.neutron [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Updating instance_info_cache with network_info: [{"id": "e4d88c42-18f3-404a-8d4a-68852d25e55f", "address": "fa:16:3e:13:c9:68", "network": {"id": "75f691f8-2853-4a39-bfdb-081341871a53", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1828741811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e63c47302d14d849b239a91580a25ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4d88c42-18", "ovs_interfaceid": "e4d88c42-18f3-404a-8d4a-68852d25e55f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.716345] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926392, 'name': Rename_Task, 'duration_secs': 0.324044} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.716687] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1192.716933] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a353a89b-c908-4523-a770-962cfd2d3969 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.724125] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1192.724125] env[69994]: value = "task-2926393" [ 1192.724125] env[69994]: _type = "Task" [ 1192.724125] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.740184] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926393, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.868456] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 309e5014-a43f-4346-9c11-036eb36c8c1f] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1192.878955] env[69994]: DEBUG nova.network.neutron [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Successfully updated port: 1c0f982d-cb97-4c63-b8e5-af47421200c1 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1192.930990] env[69994]: DEBUG oslo_vmware.api [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926391, 'name': PowerOnVM_Task, 'duration_secs': 0.541822} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.931292] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Resumed the VM {{(pid=69994) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1192.931470] env[69994]: DEBUG nova.compute.manager [None req-bbc24407-97aa-4453-a85a-afc3af8f55cb tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1192.932366] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3813705f-9ff9-4e37-a1cb-a0db00bfdbfa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.116534] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ef14acec-a93d-4085-8d23-b6d0edd3999d tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.394s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.207795] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Releasing lock "refresh_cache-1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1193.208302] env[69994]: DEBUG nova.compute.manager [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Instance network_info: |[{"id": "e4d88c42-18f3-404a-8d4a-68852d25e55f", "address": "fa:16:3e:13:c9:68", "network": {"id": "75f691f8-2853-4a39-bfdb-081341871a53", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1828741811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e63c47302d14d849b239a91580a25ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4d88c42-18", "ovs_interfaceid": "e4d88c42-18f3-404a-8d4a-68852d25e55f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1193.208869] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:c9:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '25f42474-5594-4733-a681-6c69f4afb946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4d88c42-18f3-404a-8d4a-68852d25e55f', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1193.217133] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1193.217382] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1193.217618] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05d911ef-7a8e-4775-8754-96b82a7e17d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.235402] env[69994]: DEBUG nova.compute.manager [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Received event network-vif-plugged-1c0f982d-cb97-4c63-b8e5-af47421200c1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1193.235630] env[69994]: DEBUG oslo_concurrency.lockutils [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] Acquiring lock "395a4d39-29ae-4443-949f-4737e7e2341e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.236109] env[69994]: DEBUG oslo_concurrency.lockutils [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] Lock "395a4d39-29ae-4443-949f-4737e7e2341e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.236330] env[69994]: DEBUG oslo_concurrency.lockutils [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] Lock "395a4d39-29ae-4443-949f-4737e7e2341e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.236525] env[69994]: DEBUG nova.compute.manager [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] No waiting events found dispatching network-vif-plugged-1c0f982d-cb97-4c63-b8e5-af47421200c1 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1193.236834] env[69994]: WARNING nova.compute.manager [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Received unexpected event network-vif-plugged-1c0f982d-cb97-4c63-b8e5-af47421200c1 for instance with vm_state active and task_state None. [ 1193.236834] env[69994]: DEBUG nova.compute.manager [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Received event network-changed-1c0f982d-cb97-4c63-b8e5-af47421200c1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1193.237133] env[69994]: DEBUG nova.compute.manager [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Refreshing instance network info cache due to event network-changed-1c0f982d-cb97-4c63-b8e5-af47421200c1. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1193.237297] env[69994]: DEBUG oslo_concurrency.lockutils [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] Acquiring lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.237397] env[69994]: DEBUG oslo_concurrency.lockutils [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] Acquired lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1193.237587] env[69994]: DEBUG nova.network.neutron [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Refreshing network info cache for port 1c0f982d-cb97-4c63-b8e5-af47421200c1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1193.249081] env[69994]: DEBUG oslo_vmware.api [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926393, 'name': PowerOnVM_Task, 'duration_secs': 0.501718} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.250874] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1193.252734] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1193.252734] env[69994]: value = "task-2926394" [ 1193.252734] env[69994]: _type = "Task" [ 1193.252734] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.261971] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926394, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.354202] env[69994]: DEBUG nova.compute.manager [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1193.355174] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6280487-a64f-4ba2-95c3-a5afbe09be6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.369189] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: b80a405e-a02e-4b18-a325-753146533d1b] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1193.381134] env[69994]: DEBUG oslo_concurrency.lockutils [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.522621] env[69994]: DEBUG nova.compute.manager [req-86976eb9-2c50-49ce-986b-61176debb896 req-39596a15-a893-4e8f-b453-47ffcdbec708 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Received event network-changed-e4d88c42-18f3-404a-8d4a-68852d25e55f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1193.522860] env[69994]: DEBUG nova.compute.manager [req-86976eb9-2c50-49ce-986b-61176debb896 req-39596a15-a893-4e8f-b453-47ffcdbec708 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Refreshing instance network info cache due to event network-changed-e4d88c42-18f3-404a-8d4a-68852d25e55f. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1193.523150] env[69994]: DEBUG oslo_concurrency.lockutils [req-86976eb9-2c50-49ce-986b-61176debb896 req-39596a15-a893-4e8f-b453-47ffcdbec708 service nova] Acquiring lock "refresh_cache-1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.523323] env[69994]: DEBUG oslo_concurrency.lockutils [req-86976eb9-2c50-49ce-986b-61176debb896 req-39596a15-a893-4e8f-b453-47ffcdbec708 service nova] Acquired lock "refresh_cache-1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1193.523504] env[69994]: DEBUG nova.network.neutron [req-86976eb9-2c50-49ce-986b-61176debb896 req-39596a15-a893-4e8f-b453-47ffcdbec708 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Refreshing network info cache for port e4d88c42-18f3-404a-8d4a-68852d25e55f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1193.599553] env[69994]: DEBUG nova.compute.manager [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1193.763806] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926394, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.873304] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 565066c4-2f33-44c6-8e82-4c6d729cd0b7] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1193.879116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-363a71da-3a01-4a81-b96a-2a4e48456fdc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "3c374550-d65b-494a-89d7-60720f6b44dc" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 37.918s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.040179] env[69994]: DEBUG nova.network.neutron [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Added VIF to instance network info cache for port 1c0f982d-cb97-4c63-b8e5-af47421200c1. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 1194.040179] env[69994]: DEBUG nova.network.neutron [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updating instance_info_cache with network_info: [{"id": "7608b3ed-dbf1-48c0-a088-071f08980220", "address": "fa:16:3e:78:4f:1d", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7608b3ed-db", "ovs_interfaceid": "7608b3ed-dbf1-48c0-a088-071f08980220", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1c0f982d-cb97-4c63-b8e5-af47421200c1", "address": "fa:16:3e:a6:53:3b", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c0f982d-cb", "ovs_interfaceid": "1c0f982d-cb97-4c63-b8e5-af47421200c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.136994] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.136994] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.264774] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926394, 'name': CreateVM_Task, 'duration_secs': 0.58263} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.264953] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1194.265659] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.265846] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.266232] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1194.266496] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e4a06aa-defb-48c6-a403-4482e20b4cce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.271543] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1194.271543] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52977a42-ef09-fa38-f1c7-ad8df75846fe" [ 1194.271543] env[69994]: _type = "Task" [ 1194.271543] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.279665] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52977a42-ef09-fa38-f1c7-ad8df75846fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.385015] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: d5af7ae1-d68e-4170-b762-e56d7f2551d7] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1194.512552] env[69994]: DEBUG nova.network.neutron [req-86976eb9-2c50-49ce-986b-61176debb896 req-39596a15-a893-4e8f-b453-47ffcdbec708 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Updated VIF entry in instance network info cache for port e4d88c42-18f3-404a-8d4a-68852d25e55f. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1194.513120] env[69994]: DEBUG nova.network.neutron [req-86976eb9-2c50-49ce-986b-61176debb896 req-39596a15-a893-4e8f-b453-47ffcdbec708 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Updating instance_info_cache with network_info: [{"id": "e4d88c42-18f3-404a-8d4a-68852d25e55f", "address": "fa:16:3e:13:c9:68", "network": {"id": "75f691f8-2853-4a39-bfdb-081341871a53", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1828741811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e63c47302d14d849b239a91580a25ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4d88c42-18", "ovs_interfaceid": "e4d88c42-18f3-404a-8d4a-68852d25e55f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.542645] env[69994]: DEBUG oslo_concurrency.lockutils [req-7be2cb86-dbd2-498e-9497-3d8b5b07d652 req-ec245fd3-1dd5-4aaa-ba5e-97590dedea7c service nova] Releasing lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.543335] env[69994]: DEBUG oslo_concurrency.lockutils [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.543464] env[69994]: DEBUG nova.network.neutron [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1194.642773] env[69994]: INFO nova.compute.claims [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 
tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1194.783887] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52977a42-ef09-fa38-f1c7-ad8df75846fe, 'name': SearchDatastore_Task, 'duration_secs': 0.01484} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.783887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.783887] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1194.785184] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.785184] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.785184] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1194.785184] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c125ca78-0fe6-4e93-9437-6beae55b7646 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.802556] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1194.802756] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1194.803527] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5aba928-7431-4c3b-a949-31372d601bb9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.810431] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1194.810431] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e845a5-15ea-e65c-92db-f65ddb50bbef" [ 1194.810431] env[69994]: _type = "Task" [ 1194.810431] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.820908] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e845a5-15ea-e65c-92db-f65ddb50bbef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.888620] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 06fa5ab5-baab-466e-8574-5391247c13a8] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1195.016956] env[69994]: DEBUG oslo_concurrency.lockutils [req-86976eb9-2c50-49ce-986b-61176debb896 req-39596a15-a893-4e8f-b453-47ffcdbec708 service nova] Releasing lock "refresh_cache-1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.078856] env[69994]: WARNING nova.network.neutron [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c already exists in list: networks containing: ['dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c']. ignoring it [ 1195.079090] env[69994]: WARNING nova.network.neutron [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c already exists in list: networks containing: ['dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c']. ignoring it [ 1195.079265] env[69994]: WARNING nova.network.neutron [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] 1c0f982d-cb97-4c63-b8e5-af47421200c1 already exists in list: port_ids containing: ['1c0f982d-cb97-4c63-b8e5-af47421200c1']. 
ignoring it [ 1195.149791] env[69994]: INFO nova.compute.resource_tracker [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating resource usage from migration 4acf8ba3-1369-4244-be25-3e969ceeb501 [ 1195.288858] env[69994]: DEBUG nova.compute.manager [req-8a5c3b25-07ae-4fd3-8d9b-9312a711b350 req-74c47b42-35a5-46ac-8485-b1d2434f9d37 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Received event network-changed-795b02e6-feba-409a-ad9f-5932d55da938 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1195.289133] env[69994]: DEBUG nova.compute.manager [req-8a5c3b25-07ae-4fd3-8d9b-9312a711b350 req-74c47b42-35a5-46ac-8485-b1d2434f9d37 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Refreshing instance network info cache due to event network-changed-795b02e6-feba-409a-ad9f-5932d55da938. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1195.289287] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a5c3b25-07ae-4fd3-8d9b-9312a711b350 req-74c47b42-35a5-46ac-8485-b1d2434f9d37 service nova] Acquiring lock "refresh_cache-7963eb9f-66a1-417b-928b-3b5cef7847be" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.289430] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a5c3b25-07ae-4fd3-8d9b-9312a711b350 req-74c47b42-35a5-46ac-8485-b1d2434f9d37 service nova] Acquired lock "refresh_cache-7963eb9f-66a1-417b-928b-3b5cef7847be" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.289591] env[69994]: DEBUG nova.network.neutron [req-8a5c3b25-07ae-4fd3-8d9b-9312a711b350 req-74c47b42-35a5-46ac-8485-b1d2434f9d37 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Refreshing network info cache for port 795b02e6-feba-409a-ad9f-5932d55da938 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1195.317941] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7399a1-f67a-40c6-bd15-0015cf1773a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.326252] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e845a5-15ea-e65c-92db-f65ddb50bbef, 'name': SearchDatastore_Task, 'duration_secs': 0.018578} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.328707] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41245ab1-f5d8-43fe-9db4-881fadfa2c33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.331543] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bb2bed-21ce-4615-a226-74bff645a4c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.338402] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1195.338402] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525559a9-be8d-8a53-e18c-28d416c70f44" [ 1195.338402] env[69994]: _type = "Task" [ 1195.338402] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.367473] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b76e5d0-6b91-44db-820d-19ba64d8dc24 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.377673] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a656f777-69ae-4ac2-8456-684b815b5e99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.381443] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525559a9-be8d-8a53-e18c-28d416c70f44, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.393110] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 5acdf02b-f61c-46ff-9c36-8e86b9be7738] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1195.395462] env[69994]: DEBUG nova.compute.provider_tree [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.404427] env[69994]: DEBUG nova.network.neutron [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updating instance_info_cache with network_info: [{"id": "7608b3ed-dbf1-48c0-a088-071f08980220", "address": "fa:16:3e:78:4f:1d", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7608b3ed-db", "ovs_interfaceid": "7608b3ed-dbf1-48c0-a088-071f08980220", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1c0f982d-cb97-4c63-b8e5-af47421200c1", "address": "fa:16:3e:a6:53:3b", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c0f982d-cb", "ovs_interfaceid": "1c0f982d-cb97-4c63-b8e5-af47421200c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1195.872196] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525559a9-be8d-8a53-e18c-28d416c70f44, 'name': SearchDatastore_Task, 'duration_secs': 0.03742} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.872468] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.872768] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e/1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1195.872987] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bdd9946e-42ef-4793-b7b9-67765e547a59 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.880788] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1195.880788] env[69994]: value = "task-2926395" [ 1195.880788] env[69994]: _type = "Task" [ 1195.880788] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.888886] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926395, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.898733] env[69994]: DEBUG nova.scheduler.client.report [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1195.901897] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 9d146d57-9948-4b18-a3f3-675b53d137ed] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1195.907102] env[69994]: DEBUG oslo_concurrency.lockutils [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.907702] env[69994]: DEBUG oslo_concurrency.lockutils [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.907873] env[69994]: DEBUG oslo_concurrency.lockutils [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.908716] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072cd5a9-0955-4e7d-a62c-f9f9e1d344c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.925936] env[69994]: DEBUG nova.virt.hardware [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1195.926208] env[69994]: DEBUG nova.virt.hardware [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 
tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1195.926369] env[69994]: DEBUG nova.virt.hardware [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1195.926552] env[69994]: DEBUG nova.virt.hardware [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1195.926694] env[69994]: DEBUG nova.virt.hardware [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1195.926839] env[69994]: DEBUG nova.virt.hardware [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1195.927063] env[69994]: DEBUG nova.virt.hardware [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1195.927227] env[69994]: DEBUG nova.virt.hardware [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1195.927403] env[69994]: DEBUG nova.virt.hardware [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1195.927557] env[69994]: DEBUG nova.virt.hardware [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1195.927734] env[69994]: DEBUG nova.virt.hardware [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1195.935460] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Reconfiguring VM to attach interface 
{{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1195.939869] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45abb234-555c-45ac-9107-669548dbae84 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.957758] env[69994]: DEBUG oslo_vmware.api [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1195.957758] env[69994]: value = "task-2926396" [ 1195.957758] env[69994]: _type = "Task" [ 1195.957758] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.966117] env[69994]: DEBUG oslo_vmware.api [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926396, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.045358] env[69994]: DEBUG nova.network.neutron [req-8a5c3b25-07ae-4fd3-8d9b-9312a711b350 req-74c47b42-35a5-46ac-8485-b1d2434f9d37 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Updated VIF entry in instance network info cache for port 795b02e6-feba-409a-ad9f-5932d55da938. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1196.045772] env[69994]: DEBUG nova.network.neutron [req-8a5c3b25-07ae-4fd3-8d9b-9312a711b350 req-74c47b42-35a5-46ac-8485-b1d2434f9d37 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Updating instance_info_cache with network_info: [{"id": "795b02e6-feba-409a-ad9f-5932d55da938", "address": "fa:16:3e:7c:ae:c1", "network": {"id": "239936ed-567a-4d6a-b1d2-2080ac2f2ea1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-916236539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d72179a46b64984b9ef219161bfcd76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53915f38-d7a0-42ec-8b30-1eacfb2cc379", "external-id": "nsx-vlan-transportzone-928", "segmentation_id": 928, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap795b02e6-fe", "ovs_interfaceid": "795b02e6-feba-409a-ad9f-5932d55da938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.064023] env[69994]: DEBUG oslo_concurrency.lockutils [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "a828caf9-2b61-4449-b1ee-25f0828380d1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.064422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.392244] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926395, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.405910] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.269s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.405910] env[69994]: INFO nova.compute.manager [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Migrating [ 1196.413158] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: b4c6b628-426e-4efc-b8b6-0c2937ef6df3] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1196.473448] env[69994]: DEBUG oslo_vmware.api [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926396, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.548893] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a5c3b25-07ae-4fd3-8d9b-9312a711b350 req-74c47b42-35a5-46ac-8485-b1d2434f9d37 service nova] Releasing lock "refresh_cache-7963eb9f-66a1-417b-928b-3b5cef7847be" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.567798] env[69994]: DEBUG nova.compute.utils [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1196.891450] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926395, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.701219} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.891695] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e/1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1196.891911] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1196.892215] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76515ebb-11da-43f4-b71b-95d31aa8f667 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.898116] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1196.898116] env[69994]: value = "task-2926397" [ 1196.898116] env[69994]: _type = "Task" [ 1196.898116] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.906532] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926397, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.923351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.923520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.923684] env[69994]: DEBUG nova.network.neutron [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1196.925068] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 234c2683-80f3-4f29-bcc9-9853338128bd] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1196.969304] env[69994]: DEBUG oslo_vmware.api [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926396, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.071382] env[69994]: DEBUG oslo_concurrency.lockutils [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.407943] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926397, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.324278} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.408321] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1197.408975] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9493a0-840a-4aa5-bb16-d6a28969f547 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.430159] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e/1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1197.432442] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 4cc99b2f-2d75-4a98-ac02-6b609e0c31d6] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1197.434115] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8d28eef-1dc5-4014-8c1c-14967b1a2abb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.453368] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1197.453368] env[69994]: value = "task-2926398" [ 1197.453368] env[69994]: _type = "Task" [ 1197.453368] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.461463] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926398, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.468544] env[69994]: DEBUG oslo_vmware.api [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926396, 'name': ReconfigVM_Task, 'duration_secs': 1.040005} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.468996] env[69994]: DEBUG oslo_concurrency.lockutils [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.469221] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Reconfigured VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1197.656765] env[69994]: DEBUG nova.network.neutron [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance_info_cache with network_info: [{"id": "39e82227-f1d4-4f42-8137-5212b739413c", "address": "fa:16:3e:d7:2f:7d", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39e82227-f1", "ovs_interfaceid": "39e82227-f1d4-4f42-8137-5212b739413c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.949236] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: f0a4e0cd-d0cd-45d8-8eca-4bdf8973fae8] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1197.974351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-99f3ee0f-968c-476a-9808-af6195cfea77 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-395a4d39-29ae-4443-949f-4737e7e2341e-1c0f982d-cb97-4c63-b8e5-af47421200c1" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.174s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.977532] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926398, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.133126] env[69994]: DEBUG oslo_concurrency.lockutils [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "a828caf9-2b61-4449-b1ee-25f0828380d1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.133462] env[69994]: DEBUG oslo_concurrency.lockutils [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.133716] env[69994]: INFO nova.compute.manager [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Attaching volume 722872ee-b34e-4f98-a7cb-04d35102032b to /dev/sdb [ 1198.159642] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1198.169179] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d4d953-ead7-4b5e-9364-eee6b00fdd2c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.178819] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8f57fe-9fa4-4a5f-bece-e16036383b81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.192276] env[69994]: DEBUG nova.virt.block_device [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating existing volume attachment record: 61c82e8a-e24d-4e6f-9115-2d75df303ed9 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1198.453248] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: c06a2540-e77d-48c0-967f-94e2a53c4d8f] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1198.463975] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926398, 'name': ReconfigVM_Task, 'duration_secs': 0.910323} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.464267] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e/1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1198.464887] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5eedb5c2-ad02-4dbe-99ce-19ca47ec78d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.471602] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1198.471602] env[69994]: value = "task-2926400" [ 1198.471602] env[69994]: _type = "Task" [ 1198.471602] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.479876] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926400, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.669464] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "85293c91-f363-4085-9eb8-2bf6514fa2f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.669464] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.669608] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "85293c91-f363-4085-9eb8-2bf6514fa2f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.669778] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1198.669997] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.672150] env[69994]: INFO nova.compute.manager [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Terminating instance [ 1198.959782] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: f1f0d79f-dc67-4cf9-816c-c451f20d65ca] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1198.982847] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926400, 'name': Rename_Task, 'duration_secs': 0.141813} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.983953] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1198.984326] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48fbfacc-f1a4-405e-ac12-2fffe8b2d1fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.991892] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1198.991892] env[69994]: value = "task-2926403" [ 1198.991892] env[69994]: _type = "Task" [ 1198.991892] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.000609] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926403, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.175599] env[69994]: DEBUG nova.compute.manager [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1199.175814] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1199.176880] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20028e1e-871c-4211-8f8c-4c68c0c4f2d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.184422] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1199.185343] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c0082b3-0da1-449d-b473-f918c6c90b98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.191862] env[69994]: DEBUG oslo_vmware.api [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1199.191862] env[69994]: value = "task-2926404" [ 1199.191862] env[69994]: _type = "Task" [ 1199.191862] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.200216] env[69994]: DEBUG oslo_vmware.api [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926404, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.463773] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 9b6aca3c-337b-4067-80e0-487d956fabc7] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1199.502208] env[69994]: DEBUG oslo_vmware.api [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926403, 'name': PowerOnVM_Task, 'duration_secs': 0.464004} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.502483] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1199.502681] env[69994]: INFO nova.compute.manager [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Took 8.52 seconds to spawn the instance on the hypervisor. 
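The entries above show the task pattern that dominates this log: every vSphere operation the driver issues (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) returns a Task object, and oslo.vmware's wait_for_task polls it, producing the "progress is N%" lines until the task is logged as "completed successfully". A minimal sketch of that pattern using oslo.vmware directly follows; the vCenter address, credentials and vm_ref are placeholders for illustration, not values taken from this log, and this is not the driver's actual code.

from oslo_vmware import api

# Placeholder connection settings; in Nova these come from the [vmware]
# section of nova.conf (host_ip, host_username, host_password, ...).
session = api.VMwareAPISession(
    'vcenter.example.test',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)

def power_on(vm_ref):
    # Start a Task-returning vSphere method, then block until it finishes.
    # wait_for_task polls the task (the "progress is N%" lines above) and
    # raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)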
[ 1199.502859] env[69994]: DEBUG nova.compute.manager [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1199.503695] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b26b81a-4c89-48ba-be72-ca31c4e1a270 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.519940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "interface-395a4d39-29ae-4443-949f-4737e7e2341e-1c0f982d-cb97-4c63-b8e5-af47421200c1" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.520186] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-395a4d39-29ae-4443-949f-4737e7e2341e-1c0f982d-cb97-4c63-b8e5-af47421200c1" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.681458] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85d76a5-499d-4d7d-9990-86af0ba6e127 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.707206] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance 'fe716314-1b5d-4b05-b34d-dfd444ed0c8d' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1199.722298] env[69994]: DEBUG oslo_vmware.api [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926404, 'name': PowerOffVM_Task, 'duration_secs': 0.170632} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.722298] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1199.722441] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1199.722659] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9fb188fd-1e82-44e3-b414-61ed9107b3e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.788284] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1199.788550] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1199.788710] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Deleting the datastore file [datastore1] 85293c91-f363-4085-9eb8-2bf6514fa2f1 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1199.789020] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82f5a497-fdb3-466f-a07c-ee8b63991c0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.795359] env[69994]: DEBUG oslo_vmware.api [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for the task: (returnval){ [ 1199.795359] env[69994]: value = "task-2926406" [ 1199.795359] env[69994]: _type = "Task" [ 1199.795359] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.802836] env[69994]: DEBUG oslo_vmware.api [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926406, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.966822] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: ab320e59-febb-4f8f-9bc4-74227d29c752] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1200.022555] env[69994]: INFO nova.compute.manager [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Took 13.34 seconds to build instance. [ 1200.022555] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.022555] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1200.023608] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009507b2-392b-44ca-803b-b40107a9b4bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.043431] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc70217-f4bd-49a9-8772-07fcb11eb999 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.070174] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Reconfiguring VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1200.070469] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-437085b8-9dd5-4029-8a70-01a9030db5d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.089636] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1200.089636] env[69994]: value = "task-2926407" [ 1200.089636] env[69994]: _type = "Task" [ 1200.089636] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.097248] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926407, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.219276] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1200.219276] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d88d58af-3c2a-41f7-9d43-d601a8c59b9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.227978] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1200.227978] env[69994]: value = "task-2926408" [ 1200.227978] env[69994]: _type = "Task" [ 1200.227978] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.236369] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926408, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.308023] env[69994]: DEBUG oslo_vmware.api [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Task: {'id': task-2926406, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.508282} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.308023] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1200.308023] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1200.308023] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1200.308023] env[69994]: INFO nova.compute.manager [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1200.308023] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1200.308023] env[69994]: DEBUG nova.compute.manager [-] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1200.308023] env[69994]: DEBUG nova.network.neutron [-] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1200.474258] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: b00d09ea-5eee-47ed-adcb-288cdd362e89] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1200.523466] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f40ac02b-cbd4-4ff6-80ec-e86f2a703c19 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.849s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.599828] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.665705] env[69994]: DEBUG nova.compute.manager [req-717531cf-00b6-4837-b7f5-8e8d6a18b229 req-3565c24f-949b-4ab3-980d-15544ed1e537 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Received event network-changed-e4d88c42-18f3-404a-8d4a-68852d25e55f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1200.665805] env[69994]: DEBUG nova.compute.manager [req-717531cf-00b6-4837-b7f5-8e8d6a18b229 req-3565c24f-949b-4ab3-980d-15544ed1e537 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Refreshing instance network info cache due to event network-changed-e4d88c42-18f3-404a-8d4a-68852d25e55f. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1200.666047] env[69994]: DEBUG oslo_concurrency.lockutils [req-717531cf-00b6-4837-b7f5-8e8d6a18b229 req-3565c24f-949b-4ab3-980d-15544ed1e537 service nova] Acquiring lock "refresh_cache-1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.666187] env[69994]: DEBUG oslo_concurrency.lockutils [req-717531cf-00b6-4837-b7f5-8e8d6a18b229 req-3565c24f-949b-4ab3-980d-15544ed1e537 service nova] Acquired lock "refresh_cache-1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1200.666346] env[69994]: DEBUG nova.network.neutron [req-717531cf-00b6-4837-b7f5-8e8d6a18b229 req-3565c24f-949b-4ab3-980d-15544ed1e537 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Refreshing network info cache for port e4d88c42-18f3-404a-8d4a-68852d25e55f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1200.737805] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926408, 'name': PowerOffVM_Task, 'duration_secs': 0.192791} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.738280] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1200.739191] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance 'fe716314-1b5d-4b05-b34d-dfd444ed0c8d' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1200.978122] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 2358d8f6-7fbc-4f30-93ad-27f4d96aefa7] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1201.102535] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926407, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.191109] env[69994]: DEBUG nova.network.neutron [-] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.246451] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1201.246709] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1201.246867] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1201.247142] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1201.247216] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1201.247356] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1201.247554] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1201.247706] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1201.247867] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1201.248037] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1201.248216] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1201.253813] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0db1c6eb-51e4-4b1d-9c94-be0e1cbc9e0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.272237] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1201.272237] env[69994]: value = "task-2926410" [ 1201.272237] env[69994]: _type = "Task" [ 1201.272237] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.281596] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926410, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.444538] env[69994]: DEBUG nova.network.neutron [req-717531cf-00b6-4837-b7f5-8e8d6a18b229 req-3565c24f-949b-4ab3-980d-15544ed1e537 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Updated VIF entry in instance network info cache for port e4d88c42-18f3-404a-8d4a-68852d25e55f. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1201.444973] env[69994]: DEBUG nova.network.neutron [req-717531cf-00b6-4837-b7f5-8e8d6a18b229 req-3565c24f-949b-4ab3-980d-15544ed1e537 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Updating instance_info_cache with network_info: [{"id": "e4d88c42-18f3-404a-8d4a-68852d25e55f", "address": "fa:16:3e:13:c9:68", "network": {"id": "75f691f8-2853-4a39-bfdb-081341871a53", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1828741811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e63c47302d14d849b239a91580a25ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4d88c42-18", "ovs_interfaceid": "e4d88c42-18f3-404a-8d4a-68852d25e55f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.481849] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 9e9973e1-feb8-4fd7-95ae-e6d824af5a64] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1201.602616] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.696033] env[69994]: INFO nova.compute.manager [-] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Took 1.39 seconds to deallocate network for instance. [ 1201.780394] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926410, 'name': ReconfigVM_Task, 'duration_secs': 0.17398} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.780733] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance 'fe716314-1b5d-4b05-b34d-dfd444ed0c8d' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1201.947389] env[69994]: DEBUG oslo_concurrency.lockutils [req-717531cf-00b6-4837-b7f5-8e8d6a18b229 req-3565c24f-949b-4ab3-980d-15544ed1e537 service nova] Releasing lock "refresh_cache-1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1201.984816] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 627f89ad-0381-4de9-a429-c74e26975ce9] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1202.102811] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.201953] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.202300] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.202577] env[69994]: DEBUG nova.objects.instance [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lazy-loading 'resources' on Instance uuid 85293c91-f363-4085-9eb8-2bf6514fa2f1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1202.286951] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1202.287243] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1202.287417] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1202.287600] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1202.287747] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1202.287917] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1202.288152] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1202.288314] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1202.288481] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1202.288692] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1202.288922] env[69994]: DEBUG nova.virt.hardware [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1202.294485] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 
tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1202.294802] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32c47a81-610c-4cc9-b366-7eb13a5f04b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.314438] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1202.314438] env[69994]: value = "task-2926411" [ 1202.314438] env[69994]: _type = "Task" [ 1202.314438] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.323594] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926411, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.487765] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 298a4d59-733f-4cda-a9c2-80dc21be91ca] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1202.603481] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.690026] env[69994]: DEBUG nova.compute.manager [req-ee65abf5-99a3-493b-aff6-c119d3472edf req-3604a8ad-52c6-4964-8297-7d7c94d26f91 service nova] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Received event network-vif-deleted-dcfa2bab-5c2c-4927-9b41-0d494d7f4a3b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1202.827080] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926411, 'name': ReconfigVM_Task, 'duration_secs': 0.158696} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.827080] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1202.828203] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d889dac-d3b9-43c9-8b9d-de4116600bb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.851468] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] fe716314-1b5d-4b05-b34d-dfd444ed0c8d/fe716314-1b5d-4b05-b34d-dfd444ed0c8d.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1202.854105] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4cc855f-188f-4722-a20d-a1b4c01ecb07 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.872264] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1202.872264] env[69994]: value = "task-2926412" [ 1202.872264] env[69994]: _type = "Task" [ 1202.872264] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.877478] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28daa410-6bbb-4b5b-a5fb-3dabea3e6d72 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.882957] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926412, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.887372] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53dc2b1-7caf-45c0-ab69-c63e9a3f431e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.916407] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8529fcfe-5225-4591-9e52-d68fd358e768 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.923377] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825a0fde-7279-48c1-ba25-cd690d79063d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.935983] env[69994]: DEBUG nova.compute.provider_tree [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1202.991695] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 63d6a59a-d58c-4179-ad39-eb9863e6f84c] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1203.105073] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.238164] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1203.238564] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587652', 'volume_id': '722872ee-b34e-4f98-a7cb-04d35102032b', 'name': 'volume-722872ee-b34e-4f98-a7cb-04d35102032b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a828caf9-2b61-4449-b1ee-25f0828380d1', 'attached_at': '', 'detached_at': '', 'volume_id': '722872ee-b34e-4f98-a7cb-04d35102032b', 'serial': '722872ee-b34e-4f98-a7cb-04d35102032b'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1203.239602] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3bb351-9b6b-4d57-addc-ca8d3a8fbcd2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.256138] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ab893a-685f-4346-88c7-79c4f923dc1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.281204] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] volume-722872ee-b34e-4f98-a7cb-04d35102032b/volume-722872ee-b34e-4f98-a7cb-04d35102032b.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1203.281874] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-855c7d5c-9eda-4e1b-b20f-50e0b7322337 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.300305] env[69994]: DEBUG oslo_vmware.api [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1203.300305] env[69994]: value = "task-2926413" [ 1203.300305] env[69994]: _type = "Task" [ 1203.300305] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.308277] env[69994]: DEBUG oslo_vmware.api [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926413, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.381941] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926412, 'name': ReconfigVM_Task, 'duration_secs': 0.358407} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.382433] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Reconfigured VM instance instance-0000006f to attach disk [datastore1] fe716314-1b5d-4b05-b34d-dfd444ed0c8d/fe716314-1b5d-4b05-b34d-dfd444ed0c8d.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1203.382802] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance 'fe716314-1b5d-4b05-b34d-dfd444ed0c8d' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1203.439604] env[69994]: DEBUG nova.scheduler.client.report [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1203.494474] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.494864] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Cleaning up deleted instances with incomplete migration {{(pid=69994) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1203.604493] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.811043] env[69994]: DEBUG oslo_vmware.api [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926413, 'name': ReconfigVM_Task, 'duration_secs': 0.341669} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.811510] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfigured VM instance instance-0000006e to attach disk [datastore2] volume-722872ee-b34e-4f98-a7cb-04d35102032b/volume-722872ee-b34e-4f98-a7cb-04d35102032b.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1203.816274] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9afe576d-5e5b-4e1e-a15f-6f47a8305100 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.831495] env[69994]: DEBUG oslo_vmware.api [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1203.831495] env[69994]: value = "task-2926414" [ 1203.831495] env[69994]: _type = "Task" [ 1203.831495] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.839014] env[69994]: DEBUG oslo_vmware.api [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926414, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.890496] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc1204e-1482-43ce-b312-b843df80e506 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.908866] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56cca92-f05a-46f2-a075-cbcebd7e0657 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.927381] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance 'fe716314-1b5d-4b05-b34d-dfd444ed0c8d' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1203.945906] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.744s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.963789] env[69994]: INFO nova.scheduler.client.report [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Deleted allocations for instance 85293c91-f363-4085-9eb8-2bf6514fa2f1 [ 1203.996929] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task 
ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1204.104214] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.341859] env[69994]: DEBUG oslo_vmware.api [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926414, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.465788] env[69994]: DEBUG nova.network.neutron [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Port 39e82227-f1d4-4f42-8137-5212b739413c binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1204.470832] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b3aa741d-6a0d-42d6-8058-ea39d4780172 tempest-ServersNegativeTestJSON-216427101 tempest-ServersNegativeTestJSON-216427101-project-member] Lock "85293c91-f363-4085-9eb8-2bf6514fa2f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.801s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.605665] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.842798] env[69994]: DEBUG oslo_vmware.api [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926414, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.106501] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.343836] env[69994]: DEBUG oslo_vmware.api [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926414, 'name': ReconfigVM_Task, 'duration_secs': 1.142293} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.344300] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587652', 'volume_id': '722872ee-b34e-4f98-a7cb-04d35102032b', 'name': 'volume-722872ee-b34e-4f98-a7cb-04d35102032b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a828caf9-2b61-4449-b1ee-25f0828380d1', 'attached_at': '', 'detached_at': '', 'volume_id': '722872ee-b34e-4f98-a7cb-04d35102032b', 'serial': '722872ee-b34e-4f98-a7cb-04d35102032b'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1205.487071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.487399] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.487500] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.606728] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926407, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.107454] env[69994]: DEBUG oslo_vmware.api [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926407, 'name': ReconfigVM_Task, 'duration_secs': 5.779962} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.107708] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1206.107923] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Reconfigured VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1206.383983] env[69994]: DEBUG nova.objects.instance [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'flavor' on Instance uuid a828caf9-2b61-4449-b1ee-25f0828380d1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1206.523459] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.523658] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1206.523840] env[69994]: DEBUG nova.network.neutron [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1206.888531] env[69994]: DEBUG oslo_concurrency.lockutils [None req-05601c7c-c7d0-4438-9069-6930083ee242 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.755s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.217249] env[69994]: DEBUG nova.network.neutron [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance_info_cache with network_info: [{"id": "39e82227-f1d4-4f42-8137-5212b739413c", "address": "fa:16:3e:d7:2f:7d", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39e82227-f1", "ovs_interfaceid": "39e82227-f1d4-4f42-8137-5212b739413c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.349357] env[69994]: DEBUG nova.compute.manager [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1207.482027] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.482412] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquired lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.482695] env[69994]: DEBUG nova.network.neutron [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1207.720084] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.871303] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.871602] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.952667] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "395a4d39-29ae-4443-949f-4737e7e2341e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.953027] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "395a4d39-29ae-4443-949f-4737e7e2341e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.953116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "395a4d39-29ae-4443-949f-4737e7e2341e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.953303] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "395a4d39-29ae-4443-949f-4737e7e2341e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.953469] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "395a4d39-29ae-4443-949f-4737e7e2341e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.955624] env[69994]: INFO nova.compute.manager [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Terminating instance [ 1208.178610] env[69994]: INFO nova.network.neutron [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Port 1c0f982d-cb97-4c63-b8e5-af47421200c1 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1208.178993] env[69994]: DEBUG nova.network.neutron [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updating instance_info_cache with network_info: [{"id": "7608b3ed-dbf1-48c0-a088-071f08980220", "address": "fa:16:3e:78:4f:1d", "network": {"id": "dd1b2bd1-78a1-476c-ae3e-aaffd2cfbb9c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-828229638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66d57a69e0924b9abc2cc4e67fc8173c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7608b3ed-db", "ovs_interfaceid": "7608b3ed-dbf1-48c0-a088-071f08980220", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.240653] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4945d037-6e16-4fa4-9c0a-91d78c5d7342 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.262022] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f3b1a8-d68c-47e3-8b2d-29ebc5fe7e37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.268685] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance 'fe716314-1b5d-4b05-b34d-dfd444ed0c8d' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1208.376148] env[69994]: INFO nova.compute.claims [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1208.459717] env[69994]: DEBUG nova.compute.manager [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1208.459948] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1208.461088] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbdfe44-78d0-4a42-a5b2-4d69afa7af4b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.468738] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1208.468975] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81e51a26-011d-4536-bb9b-8a5e3427025e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.478897] env[69994]: DEBUG oslo_vmware.api [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1208.478897] env[69994]: value = "task-2926415" [ 1208.478897] env[69994]: _type = "Task" [ 1208.478897] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.486854] env[69994]: DEBUG oslo_vmware.api [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926415, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.682020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Releasing lock "refresh_cache-395a4d39-29ae-4443-949f-4737e7e2341e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.775048] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1208.775364] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0484012e-ac61-41a5-b551-f30d5af756e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.782847] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1208.782847] env[69994]: value = "task-2926416" [ 1208.782847] env[69994]: _type = "Task" [ 1208.782847] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.790697] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926416, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.882448] env[69994]: INFO nova.compute.resource_tracker [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating resource usage from migration 24848846-dcfd-4ba5-a1c5-4836e1411fd8 [ 1208.989915] env[69994]: DEBUG oslo_vmware.api [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926415, 'name': PowerOffVM_Task, 'duration_secs': 0.234459} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.990223] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1208.990407] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1208.990673] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22a4780f-e50e-498e-9589-2455499c84b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.034288] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd8c9f2-9321-4551-8071-af55f17c9255 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.042351] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3ca8b4-6f09-4982-9bc4-e343be1ff715 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.077538] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac2f388-3d3a-4bf2-805d-bbf189dd5faa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.081415] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1209.081632] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 
tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1209.081810] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Deleting the datastore file [datastore1] 395a4d39-29ae-4443-949f-4737e7e2341e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1209.082100] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7836e82-f5a6-49bd-80fb-27aa24ddbca0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.089280] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e9bb61-03fe-47e8-8712-a70e74a81d6e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.093270] env[69994]: DEBUG oslo_vmware.api [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1209.093270] env[69994]: value = "task-2926418" [ 1209.093270] env[69994]: _type = "Task" [ 1209.093270] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.105519] env[69994]: DEBUG nova.compute.provider_tree [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1209.112121] env[69994]: DEBUG oslo_vmware.api [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926418, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.185711] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5fdfa05-4ef0-4bd1-9404-2f67d364b08b tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "interface-395a4d39-29ae-4443-949f-4737e7e2341e-1c0f982d-cb97-4c63-b8e5-af47421200c1" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.665s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.292289] env[69994]: DEBUG oslo_vmware.api [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926416, 'name': PowerOnVM_Task, 'duration_secs': 0.363247} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.292560] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1209.292743] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ea50f297-9691-4a23-b988-9fb74a7a649d tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance 'fe716314-1b5d-4b05-b34d-dfd444ed0c8d' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1209.602931] env[69994]: DEBUG oslo_vmware.api [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926418, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152432} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.603222] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1209.603409] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1209.603585] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1209.603758] env[69994]: INFO nova.compute.manager [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1209.604000] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1209.604199] env[69994]: DEBUG nova.compute.manager [-] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1209.604293] env[69994]: DEBUG nova.network.neutron [-] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1209.608757] env[69994]: DEBUG nova.scheduler.client.report [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1210.113774] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.242s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1210.114153] env[69994]: INFO nova.compute.manager [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Migrating [ 1210.271220] env[69994]: DEBUG nova.compute.manager [req-2ab7df9c-c12c-467b-bc00-ac9e2674ce63 req-f65e2638-2b0c-48f8-bdaa-2a1909580bf2 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Received event network-vif-deleted-7608b3ed-dbf1-48c0-a088-071f08980220 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1210.271428] env[69994]: INFO nova.compute.manager [req-2ab7df9c-c12c-467b-bc00-ac9e2674ce63 req-f65e2638-2b0c-48f8-bdaa-2a1909580bf2 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Neutron deleted interface 7608b3ed-dbf1-48c0-a088-071f08980220; detaching it from the instance and deleting it from the info cache [ 1210.271549] env[69994]: DEBUG nova.network.neutron [req-2ab7df9c-c12c-467b-bc00-ac9e2674ce63 req-f65e2638-2b0c-48f8-bdaa-2a1909580bf2 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.629510] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.629699] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af 
tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1210.629864] env[69994]: DEBUG nova.network.neutron [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1210.747259] env[69994]: DEBUG nova.network.neutron [-] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.776094] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15d30d90-cf06-4a82-9341-20090f846658 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.788029] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62916033-b5aa-4cf1-b20f-6fe42a49d78b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.822117] env[69994]: DEBUG nova.compute.manager [req-2ab7df9c-c12c-467b-bc00-ac9e2674ce63 req-f65e2638-2b0c-48f8-bdaa-2a1909580bf2 service nova] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Detach interface failed, port_id=7608b3ed-dbf1-48c0-a088-071f08980220, reason: Instance 395a4d39-29ae-4443-949f-4737e7e2341e could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1211.250096] env[69994]: INFO nova.compute.manager [-] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Took 1.65 seconds to deallocate network for instance. 
[ 1211.354213] env[69994]: DEBUG nova.network.neutron [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance_info_cache with network_info: [{"id": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "address": "fa:16:3e:d5:49:79", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb02b823a-ee", "ovs_interfaceid": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.758574] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.758947] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.759227] env[69994]: DEBUG nova.objects.instance [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'resources' on Instance uuid 395a4d39-29ae-4443-949f-4737e7e2341e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1211.857344] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.163086] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d" by 
"nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.163260] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.163429] env[69994]: DEBUG nova.compute.manager [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Going to confirm migration 6 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1212.395728] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb40d6e7-3069-4128-b244-0dafb4c17e47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.403853] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f91897-d812-4265-94a7-c04bddbc4c1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.433813] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3a048b-ccbc-4d4d-8848-83123b4d2465 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.441602] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c697b91-d45d-4dea-b650-e613d71854da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.455955] env[69994]: DEBUG nova.compute.provider_tree [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1212.566780] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.566967] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.718240] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.718337] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 
tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquired lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.718517] env[69994]: DEBUG nova.network.neutron [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1212.718699] env[69994]: DEBUG nova.objects.instance [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lazy-loading 'info_cache' on Instance uuid fe716314-1b5d-4b05-b34d-dfd444ed0c8d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.959596] env[69994]: DEBUG nova.scheduler.client.report [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1213.073138] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.073391] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.073600] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.073790] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.073965] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.074158] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.371981] 
env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19616de9-23a3-4b5d-b346-03059e6f6646 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.393329] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance 'a828caf9-2b61-4449-b1ee-25f0828380d1' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1213.464822] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.706s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.498859] env[69994]: INFO nova.scheduler.client.report [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Deleted allocations for instance 395a4d39-29ae-4443-949f-4737e7e2341e [ 1213.577958] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Getting list of instances from cluster (obj){ [ 1213.577958] env[69994]: value = "domain-c8" [ 1213.577958] env[69994]: _type = "ClusterComputeResource" [ 1213.577958] env[69994]: } {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1213.579018] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7983e7f4-159d-43de-9049-c5ee0bfa24c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.599063] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Got total of 9 instances {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1213.599236] env[69994]: WARNING nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] While synchronizing instance power states, found 10 instances in the database and 9 instances on the hypervisor. 
[ 1213.599374] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Triggering sync for uuid 8001cb13-6a52-451b-b4b6-57b893975079 {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1213.599560] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Triggering sync for uuid 3c374550-d65b-494a-89d7-60720f6b44dc {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1213.599713] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Triggering sync for uuid 60f6d502-0fef-4764-8c1f-1b1d5ab3db41 {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1213.599858] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Triggering sync for uuid 29ea539a-d8f4-487b-b5e7-1f15534272f9 {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1213.600010] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Triggering sync for uuid 87c5b8e4-166c-44b9-a179-1afaef751434 {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1213.600173] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Triggering sync for uuid 395a4d39-29ae-4443-949f-4737e7e2341e {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1213.600318] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Triggering sync for uuid a828caf9-2b61-4449-b1ee-25f0828380d1 {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1213.600459] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Triggering sync for uuid fe716314-1b5d-4b05-b34d-dfd444ed0c8d {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1213.600602] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Triggering sync for uuid 7963eb9f-66a1-417b-928b-3b5cef7847be {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1213.600742] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Triggering sync for uuid 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1213.601099] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "8001cb13-6a52-451b-b4b6-57b893975079" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.601330] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "8001cb13-6a52-451b-b4b6-57b893975079" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.601589] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "3c374550-d65b-494a-89d7-60720f6b44dc" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.601828] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "3c374550-d65b-494a-89d7-60720f6b44dc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.602132] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.602324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.602545] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.602716] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.602930] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "87c5b8e4-166c-44b9-a179-1afaef751434" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.603114] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "87c5b8e4-166c-44b9-a179-1afaef751434" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.603335] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "395a4d39-29ae-4443-949f-4737e7e2341e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.603534] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "a828caf9-2b61-4449-b1ee-25f0828380d1" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.603700] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.603855] env[69994]: INFO nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] During sync_power_state the instance has a pending task (resize_migrating). Skip. [ 1213.604013] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.604204] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.604400] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "7963eb9f-66a1-417b-928b-3b5cef7847be" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.604568] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.604780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.604980] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.605199] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.605332] env[69994]: DEBUG nova.compute.manager [None 
req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1213.606022] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa56df7-69c2-4e31-8954-f84e82243413 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.609082] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc72168-25d1-483f-a49c-50318b280df3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.611734] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bcfd9f1-6107-4d16-8616-4f1f78a7c860 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.614422] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4813a08f-5d71-4624-86dd-7184ad87e2ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.617032] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f6e241-720e-4e16-aeaf-dfa56688c44c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.619708] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc26f72-aeea-48dc-b294-6ce4fa5868b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.622994] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef99d3e-2507-4121-9749-149a68dbbb0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.625621] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.898610] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1213.898952] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cffe01d7-cf63-499e-b691-693c1e80206c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.905801] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1213.905801] env[69994]: value = "task-2926419" [ 1213.905801] env[69994]: _type = "Task" [ 1213.905801] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.913732] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926419, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.954196] env[69994]: DEBUG nova.network.neutron [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance_info_cache with network_info: [{"id": "39e82227-f1d4-4f42-8137-5212b739413c", "address": "fa:16:3e:d7:2f:7d", "network": {"id": "1c815442-d1b8-4bd4-9946-6ab768560d4e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1606442918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0f5bb040f474df19739d5170639ff67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39e82227-f1", "ovs_interfaceid": "39e82227-f1d4-4f42-8137-5212b739413c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.005274] env[69994]: DEBUG oslo_concurrency.lockutils [None req-adaf66b6-1ee1-4696-b668-d3c742227cef tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "395a4d39-29ae-4443-949f-4737e7e2341e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.052s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.006172] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "395a4d39-29ae-4443-949f-4737e7e2341e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.403s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.006622] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-221f0f5b-3954-4ae5-9c82-dd453b1128c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.024947] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cf790a-a49c-4592-b1ac-93870c5e5cd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.130555] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None 
None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.130843] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.131144] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.131382] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1214.132369] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21640d38-020a-4c82-b34c-38292febf94d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.143016] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e8d9ff-e963-435f-85bc-faa596ef92cc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.149238] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "3c374550-d65b-494a-89d7-60720f6b44dc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.547s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.149780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.547s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.150246] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "8001cb13-6a52-451b-b4b6-57b893975079" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.549s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.150706] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "87c5b8e4-166c-44b9-a179-1afaef751434" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.547s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.151666] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be" "released" 
by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.547s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.152106] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.549s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.152638] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.547s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.166646] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97af5c4-aaf3-47d5-80c6-e7a0511d72f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.176026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757115d7-e9fa-4153-9bc5-5858ab90c1d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.207272] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179911MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1214.207428] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.207634] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.416056] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926419, 'name': PowerOffVM_Task, 'duration_secs': 0.241797} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.416314] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1214.416501] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance 'a828caf9-2b61-4449-b1ee-25f0828380d1' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1214.456895] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Releasing lock "refresh_cache-fe716314-1b5d-4b05-b34d-dfd444ed0c8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1214.457212] env[69994]: DEBUG nova.objects.instance [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lazy-loading 'migration_context' on Instance uuid fe716314-1b5d-4b05-b34d-dfd444ed0c8d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1214.563429] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "395a4d39-29ae-4443-949f-4737e7e2341e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.557s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.741108] env[69994]: DEBUG oslo_concurrency.lockutils [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "87c5b8e4-166c-44b9-a179-1afaef751434" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.741447] env[69994]: DEBUG oslo_concurrency.lockutils [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "87c5b8e4-166c-44b9-a179-1afaef751434" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.741666] env[69994]: DEBUG oslo_concurrency.lockutils [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "87c5b8e4-166c-44b9-a179-1afaef751434-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.741847] env[69994]: DEBUG oslo_concurrency.lockutils [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 
tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "87c5b8e4-166c-44b9-a179-1afaef751434-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.742028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "87c5b8e4-166c-44b9-a179-1afaef751434-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.744365] env[69994]: INFO nova.compute.manager [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Terminating instance [ 1214.924222] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1214.924502] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1214.924662] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1214.924841] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1214.924989] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1214.925150] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1214.925353] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1214.925509] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1214.925670] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1214.925832] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1214.926009] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1214.934350] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b03cb558-3bb0-474b-a70b-8b1da5773824 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.947686] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.947889] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.955173] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1214.955173] env[69994]: value = "task-2926420" [ 1214.955173] env[69994]: _type = "Task" [ 1214.955173] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.960039] env[69994]: DEBUG nova.objects.base [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1214.960651] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be42c4ad-a261-4631-ab97-2e8744a3423c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.968358] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926420, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.983595] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e666e66c-e622-4383-83ec-b323d8faebe4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.989251] env[69994]: DEBUG oslo_vmware.api [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1214.989251] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524c5a23-fdb4-1a2b-257e-edd672fc75a6" [ 1214.989251] env[69994]: _type = "Task" [ 1214.989251] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.997103] env[69994]: DEBUG oslo_vmware.api [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524c5a23-fdb4-1a2b-257e-edd672fc75a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.219649] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Applying migration context for instance a828caf9-2b61-4449-b1ee-25f0828380d1 as it has an incoming, in-progress migration 24848846-dcfd-4ba5-a1c5-4836e1411fd8. Migration status is migrating {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1215.219649] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Applying migration context for instance fe716314-1b5d-4b05-b34d-dfd444ed0c8d as it has an incoming, in-progress migration 4acf8ba3-1369-4244-be25-3e969ceeb501. 
Migration status is finished {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1215.220851] env[69994]: INFO nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating resource usage from migration 24848846-dcfd-4ba5-a1c5-4836e1411fd8 [ 1215.221355] env[69994]: INFO nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating resource usage from migration 4acf8ba3-1369-4244-be25-3e969ceeb501 [ 1215.248212] env[69994]: DEBUG nova.compute.manager [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1215.248478] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1215.249511] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396d8fbd-3527-4bde-82d3-12436ba60ac9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.257262] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1215.258149] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 8001cb13-6a52-451b-b4b6-57b893975079 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.258290] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 60f6d502-0fef-4764-8c1f-1b1d5ab3db41 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.258410] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 29ea539a-d8f4-487b-b5e7-1f15534272f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.258524] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 87c5b8e4-166c-44b9-a179-1afaef751434 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.258635] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 3c374550-d65b-494a-89d7-60720f6b44dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.258744] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 7963eb9f-66a1-417b-928b-3b5cef7847be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.258900] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.259048] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Migration 4acf8ba3-1369-4244-be25-3e969ceeb501 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1215.259164] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance fe716314-1b5d-4b05-b34d-dfd444ed0c8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.259272] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Migration 24848846-dcfd-4ba5-a1c5-4836e1411fd8 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1215.259378] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance a828caf9-2b61-4449-b1ee-25f0828380d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.259596] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1215.259732] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2752MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1215.261881] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50e0b551-e24b-4d1b-b7ba-e0ba9deaed93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.268172] env[69994]: DEBUG oslo_vmware.api [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1215.268172] env[69994]: value = "task-2926421" [ 1215.268172] env[69994]: _type = "Task" [ 1215.268172] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.275644] env[69994]: DEBUG oslo_vmware.api [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926421, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.414035] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b686a0-fbfb-468a-96bb-f6a260ac67e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.421893] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e034d7-a9f8-4a1b-b80a-03e925f6ab28 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.453803] env[69994]: INFO nova.compute.manager [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Detaching volume e95e0bf4-fc79-440d-84e6-8467a47b5cc4 [ 1215.456756] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787aa31b-ee30-4c6c-9481-b838ad4e8b7c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.472386] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56bbb80-4549-4f19-9c1b-e99501bae6c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.476668] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926420, 'name': ReconfigVM_Task, 'duration_secs': 0.162235} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.476976] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance 'a828caf9-2b61-4449-b1ee-25f0828380d1' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1215.489989] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1215.492078] env[69994]: INFO nova.virt.block_device [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Attempting to driver detach volume e95e0bf4-fc79-440d-84e6-8467a47b5cc4 from mountpoint /dev/sdb [ 1215.492295] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1215.492482] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587638', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'name': 'volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '60f6d502-0fef-4764-8c1f-1b1d5ab3db41', 'attached_at': '', 'detached_at': '', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'serial': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1215.493316] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7290b7bf-3651-41f6-ad98-832e69e60125 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.520055] env[69994]: DEBUG oslo_vmware.api [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524c5a23-fdb4-1a2b-257e-edd672fc75a6, 'name': SearchDatastore_Task, 'duration_secs': 0.008168} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.520444] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.521243] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8c65dd-f6f0-4bd5-8bcd-2bdb9484636a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.527647] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16cb8bd9-d943-4a13-84e0-5543abde4592 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.547770] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db830cf3-b097-475c-a12c-cf6798333a44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.564516] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] The volume has not been displaced from its original location: [datastore2] volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4/volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1215.569736] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1215.570240] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b605b93-0f27-484d-a0ef-7f8b0ef73c34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.587758] env[69994]: DEBUG oslo_vmware.api [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1215.587758] env[69994]: value = "task-2926422" [ 1215.587758] env[69994]: _type = "Task" [ 1215.587758] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.595406] env[69994]: DEBUG oslo_vmware.api [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926422, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.778022] env[69994]: DEBUG oslo_vmware.api [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926421, 'name': PowerOffVM_Task, 'duration_secs': 0.231632} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.778271] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1215.778413] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1215.778665] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b71441be-6891-422b-85c9-75840452cbfe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.842653] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1215.842899] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1215.843064] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Deleting the datastore file [datastore2] 87c5b8e4-166c-44b9-a179-1afaef751434 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1215.843344] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4b54314-7e57-4731-a367-5aac2fa891e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.849656] env[69994]: DEBUG oslo_vmware.api [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for the task: (returnval){ [ 1215.849656] env[69994]: value = "task-2926424" [ 1215.849656] env[69994]: _type = "Task" [ 1215.849656] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.857273] env[69994]: DEBUG oslo_vmware.api [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926424, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.984437] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1215.984757] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1215.984941] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1215.985173] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1215.985361] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1215.985550] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1215.985793] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1215.985990] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1215.986216] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 
tempest-ServerActionsTestOtherB-1022362737-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1215.986416] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1215.986639] env[69994]: DEBUG nova.virt.hardware [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1215.993255] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1215.993255] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff32c16e-1488-4613-98be-7ddb7a475721 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.015413] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1216.024592] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1216.024592] env[69994]: value = "task-2926425" [ 1216.024592] env[69994]: _type = "Task" [ 1216.024592] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.032971] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926425, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.097809] env[69994]: DEBUG oslo_vmware.api [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926422, 'name': ReconfigVM_Task, 'duration_secs': 0.214936} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.098109] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1216.103114] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4533f52f-3bb5-4da5-883f-b93b45a8976a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.116846] env[69994]: DEBUG oslo_vmware.api [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1216.116846] env[69994]: value = "task-2926426" [ 1216.116846] env[69994]: _type = "Task" [ 1216.116846] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.124508] env[69994]: DEBUG oslo_vmware.api [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926426, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.359275] env[69994]: DEBUG oslo_vmware.api [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Task: {'id': task-2926424, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150673} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.359543] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1216.359726] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1216.359900] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1216.360091] env[69994]: INFO nova.compute.manager [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Took 1.11 seconds to destroy the instance on the hypervisor. 
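Annotation: the PowerOffVM_Task, UnregisterVM, and DeleteDatastoreFile_Task entries above all follow the same wait_for_task cycle — the driver submits a vCenter task, then polls it, logging "progress is N%." until the task reports success or error. Below is a minimal Python sketch of that polling loop; TaskInfo and fetch_task_info are illustrative stand-ins for the vCenter TaskInfo object and property-collector round trip, not the real oslo_vmware implementation.

    # Simplified model of the "Waiting for the task ... progress is N% ...
    # completed successfully" cycle seen in the log entries above.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        task_id: str
        state: str      # "running", "success", or "error"
        progress: int   # percent complete

    def fetch_task_info(task_id, attempt):
        # Stand-in for one PropertyCollector round trip to vCenter;
        # pretend the task finishes on the third poll.
        if attempt < 3:
            return TaskInfo(task_id, "running", attempt * 10)
        return TaskInfo(task_id, "success", 100)

    def wait_for_task(task_id, poll_interval=0.5):
        """Poll a vCenter-style task until it reaches success or error."""
        attempt = 0
        while True:
            info = fetch_task_info(task_id, attempt)
            if info.state == "success":
                print(f"Task: {task_id} completed successfully.")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed")
            print(f"Task: {task_id} progress is {info.progress}%.")
            attempt += 1
            time.sleep(poll_interval)

    wait_for_task("task-2926424")

Each iteration corresponds to one "_poll_task ... progress is N%." line; the final "completed successfully" line is what allows the driver to proceed to the next step of the destroy sequence.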
[ 1216.360355] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1216.360546] env[69994]: DEBUG nova.compute.manager [-] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1216.360644] env[69994]: DEBUG nova.network.neutron [-] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1216.521605] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1216.521826] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.314s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.522263] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.002s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.533358] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926425, 'name': ReconfigVM_Task, 'duration_secs': 0.180672} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.533630] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1216.534427] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31912cba-2c92-43b5-b4f8-6ee0d7d2f9fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.559627] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] a828caf9-2b61-4449-b1ee-25f0828380d1/a828caf9-2b61-4449-b1ee-25f0828380d1.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1216.560092] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc54f15c-1428-4682-b015-57c626aba101 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.577452] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1216.577452] env[69994]: value = "task-2926427" [ 1216.577452] env[69994]: _type = "Task" [ 1216.577452] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.585102] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926427, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.629962] env[69994]: DEBUG oslo_vmware.api [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926426, 'name': ReconfigVM_Task, 'duration_secs': 0.134097} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.630304] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587638', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'name': 'volume-e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '60f6d502-0fef-4764-8c1f-1b1d5ab3db41', 'attached_at': '', 'detached_at': '', 'volume_id': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4', 'serial': 'e95e0bf4-fc79-440d-84e6-8467a47b5cc4'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1216.782520] env[69994]: DEBUG nova.compute.manager [req-805d80bf-3a4d-441c-9f2a-84ef619a5905 req-3f397ef5-47fa-4a1c-b0a3-e842de141667 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Received event network-vif-deleted-be3723ea-e18d-4908-bb9b-d8bbce5d3cee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1216.782520] env[69994]: INFO nova.compute.manager [req-805d80bf-3a4d-441c-9f2a-84ef619a5905 req-3f397ef5-47fa-4a1c-b0a3-e842de141667 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Neutron deleted interface be3723ea-e18d-4908-bb9b-d8bbce5d3cee; detaching it from the instance and deleting it from the info cache [ 1216.782520] env[69994]: DEBUG nova.network.neutron [req-805d80bf-3a4d-441c-9f2a-84ef619a5905 req-3f397ef5-47fa-4a1c-b0a3-e842de141667 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.087826] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926427, 'name': ReconfigVM_Task, 'duration_secs': 0.291034} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.091058] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfigured VM instance instance-0000006e to attach disk [datastore2] a828caf9-2b61-4449-b1ee-25f0828380d1/a828caf9-2b61-4449-b1ee-25f0828380d1.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1217.091058] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance 'a828caf9-2b61-4449-b1ee-25f0828380d1' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1217.201944] env[69994]: DEBUG nova.objects.instance [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lazy-loading 'flavor' on Instance uuid 60f6d502-0fef-4764-8c1f-1b1d5ab3db41 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1217.247085] env[69994]: DEBUG nova.network.neutron [-] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.285762] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-820a7a73-fb78-476d-b931-3f8f7775303a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.296108] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b0941e-ff05-4ada-9326-db9255253c51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.331351] env[69994]: DEBUG nova.compute.manager [req-805d80bf-3a4d-441c-9f2a-84ef619a5905 req-3f397ef5-47fa-4a1c-b0a3-e842de141667 service nova] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Detach interface failed, port_id=be3723ea-e18d-4908-bb9b-d8bbce5d3cee, reason: Instance 87c5b8e4-166c-44b9-a179-1afaef751434 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1217.333173] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea8945e-6b26-4e67-813a-608fc925a510 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.340253] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58dfe6e1-1181-46d3-9929-a4486a728eac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.373094] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3447f3c-d235-4a40-9ab7-90b8baad81a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.380084] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8870a6d4-a32a-4585-90b0-e916be913eb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.393756] env[69994]: DEBUG nova.compute.provider_tree [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1217.598122] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096a053b-9b71-4b27-b9bf-2d524aa2193d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.622881] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f53f81-7ac0-4087-8c45-d492336cda11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.646815] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance 'a828caf9-2b61-4449-b1ee-25f0828380d1' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1217.749681] env[69994]: INFO nova.compute.manager [-] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Took 1.39 seconds to deallocate network for instance. 
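Annotation: the totals reported by the resource tracker audit earlier in this run follow directly from the allocations it listed — eleven allocations of 1 VCPU and 1 GB disk each, with 192 MB or 256 MB of memory apiece, plus the 512 MB of reserved host memory shown in the placement inventory. A small sketch of that arithmetic, with the values copied from the log (variable names are illustrative only):

    # Allocations copied from the _remove_deleted_instances_allocations entries:
    # 7 instances and 2 migrations at 192 MB, 2 instances at 256 MB.
    allocations = (
        [{"MEMORY_MB": 192, "DISK_GB": 1, "VCPU": 1}] * 9
        + [{"MEMORY_MB": 256, "DISK_GB": 1, "VCPU": 1}] * 2
    )

    reserved_host_memory_mb = 512  # 'reserved' value in the MEMORY_MB inventory

    used_ram = reserved_host_memory_mb + sum(a["MEMORY_MB"] for a in allocations)
    used_disk = sum(a["DISK_GB"] for a in allocations)
    used_vcpus = sum(a["VCPU"] for a in allocations)

    # Matches the "Final resource view" line:
    # used_ram=2752MB used_disk=11GB used_vcpus=11
    assert (used_ram, used_disk, used_vcpus) == (2752, 11, 11)
    print(f"used_ram={used_ram}MB used_disk={used_disk}GB used_vcpus={used_vcpus}")

The same figures feed the inventory reported to placement (48 VCPU at allocation_ratio 4.0, 196590 MB RAM with 512 MB reserved, 400 GB disk), which is why the scheduler report client logs "Inventory has not changed" on each pass.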
[ 1217.780754] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.781056] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.781309] env[69994]: INFO nova.compute.manager [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Rebooting instance [ 1217.896685] env[69994]: DEBUG nova.scheduler.client.report [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1218.208664] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12749863-b43e-4dce-b76c-3cf6c55987b9 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.260s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.258035] env[69994]: DEBUG oslo_concurrency.lockutils [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.297711] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.297907] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.298093] 
env[69994]: DEBUG nova.network.neutron [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1218.815941] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.816381] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.816381] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.816514] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.816664] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.820549] env[69994]: INFO nova.compute.manager [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Terminating instance [ 1218.908384] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.386s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.912043] env[69994]: DEBUG oslo_concurrency.lockutils [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 
tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.656s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.912043] env[69994]: DEBUG nova.objects.instance [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lazy-loading 'resources' on Instance uuid 87c5b8e4-166c-44b9-a179-1afaef751434 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1219.166853] env[69994]: DEBUG nova.network.neutron [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance_info_cache with network_info: [{"id": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "address": "fa:16:3e:8a:06:07", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c872b8c-ae", "ovs_interfaceid": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.325484] env[69994]: DEBUG nova.compute.manager [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1219.325702] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1219.326654] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d818e64-ae3b-4b2f-b2d0-a5122d38478d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.334625] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1219.334625] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad2020c8-45a3-4dd2-83f2-fc8d4ce3539d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.341076] env[69994]: DEBUG oslo_vmware.api [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1219.341076] env[69994]: value = "task-2926428" [ 1219.341076] env[69994]: _type = "Task" [ 1219.341076] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.348803] env[69994]: DEBUG oslo_vmware.api [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926428, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.361980] env[69994]: DEBUG nova.network.neutron [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Port b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1219.520460] env[69994]: INFO nova.scheduler.client.report [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleted allocation for migration 4acf8ba3-1369-4244-be25-3e969ceeb501 [ 1219.583383] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16ae72c-26d6-498b-84f2-3bd11ca38f45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.591043] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20e812c-b4b8-4bbe-9e92-6e87c13c80a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.621618] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85429e0b-1b6b-4836-b643-16cc75fef824 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.629598] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188cfb07-bf25-4543-aa2b-264cf5d3e1bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.642411] env[69994]: DEBUG nova.compute.provider_tree [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1219.669228] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1219.850132] env[69994]: DEBUG oslo_vmware.api [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926428, 'name': PowerOffVM_Task, 'duration_secs': 0.16213} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.850414] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1219.850568] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1219.850810] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f128265b-1cc2-4337-a78a-46d5743675cc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.915943] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1219.915943] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1219.916243] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleting the datastore file [datastore1] 60f6d502-0fef-4764-8c1f-1b1d5ab3db41 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1219.916399] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11cbea9f-62a7-4887-a003-0c98a80f9f03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.924271] env[69994]: DEBUG oslo_vmware.api [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1219.924271] env[69994]: value = "task-2926430" [ 1219.924271] env[69994]: _type = "Task" [ 1219.924271] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.932702] env[69994]: DEBUG oslo_vmware.api [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926430, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.028230] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.865s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.029952] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.425s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.029952] env[69994]: INFO nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] During sync_power_state the instance has a pending task (deleting). Skip. [ 1220.029952] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.151173] env[69994]: DEBUG nova.scheduler.client.report [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1220.172859] env[69994]: DEBUG nova.compute.manager [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1220.173750] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e75437-bd2f-425c-82b0-74e7d107bfbd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.384453] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "a828caf9-2b61-4449-b1ee-25f0828380d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.384453] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af 
tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.384680] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.435121] env[69994]: DEBUG oslo_vmware.api [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926430, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133922} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.436066] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1220.436066] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1220.436066] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1220.436066] env[69994]: INFO nova.compute.manager [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1220.436313] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1220.436313] env[69994]: DEBUG nova.compute.manager [-] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1220.436372] env[69994]: DEBUG nova.network.neutron [-] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1220.656036] env[69994]: DEBUG oslo_concurrency.lockutils [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.744s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.715384] env[69994]: INFO nova.scheduler.client.report [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Deleted allocations for instance 87c5b8e4-166c-44b9-a179-1afaef751434 [ 1221.166537] env[69994]: DEBUG nova.compute.manager [req-3e36a0cf-52e5-451a-9c2a-3e62a49d7d9f req-fb00ae51-29e2-42ea-b304-36ac2677d7e9 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Received event network-vif-deleted-29cc87df-3c6e-45eb-a80d-5127f53062e1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1221.166896] env[69994]: INFO nova.compute.manager [req-3e36a0cf-52e5-451a-9c2a-3e62a49d7d9f req-fb00ae51-29e2-42ea-b304-36ac2677d7e9 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Neutron deleted interface 29cc87df-3c6e-45eb-a80d-5127f53062e1; detaching it from the instance and deleting it from the info cache [ 1221.166980] env[69994]: DEBUG nova.network.neutron [req-3e36a0cf-52e5-451a-9c2a-3e62a49d7d9f req-fb00ae51-29e2-42ea-b304-36ac2677d7e9 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.189915] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da12bd0-4474-4108-95ff-4038933c4532 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.193732] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.194933] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.194933] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.194933] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.194933] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.198217] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Doing hard reboot of VM {{(pid=69994) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1221.198662] env[69994]: INFO nova.compute.manager [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Terminating instance [ 1221.199882] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-c2d75ac4-392d-4d84-9f18-ce02ae1cf4cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.209419] env[69994]: DEBUG oslo_vmware.api [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1221.209419] env[69994]: value = "task-2926431" [ 1221.209419] env[69994]: _type = "Task" [ 1221.209419] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.218475] env[69994]: DEBUG oslo_vmware.api [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926431, 'name': ResetVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.224366] env[69994]: DEBUG oslo_concurrency.lockutils [None req-595f99a2-d10f-42a1-82f1-cfd8c48a0a85 tempest-AttachInterfacesTestJSON-1252867505 tempest-AttachInterfacesTestJSON-1252867505-project-member] Lock "87c5b8e4-166c-44b9-a179-1afaef751434" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.483s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.433554] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.433751] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1221.433931] env[69994]: DEBUG nova.network.neutron [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1221.645888] env[69994]: DEBUG nova.network.neutron [-] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.672450] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-202322e4-3cc4-4bce-947f-6dad5ca194df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.682343] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e04dbe8-b685-4476-9fe3-efbc3f32d87e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.703201] env[69994]: DEBUG nova.compute.manager [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1221.703422] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1221.704392] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-becf3372-3b60-4ad7-9d8f-139fdc2ee573 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.716141] env[69994]: DEBUG nova.compute.manager [req-3e36a0cf-52e5-451a-9c2a-3e62a49d7d9f req-fb00ae51-29e2-42ea-b304-36ac2677d7e9 service nova] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Detach interface failed, port_id=29cc87df-3c6e-45eb-a80d-5127f53062e1, reason: Instance 60f6d502-0fef-4764-8c1f-1b1d5ab3db41 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1221.721658] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.722493] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a987af3e-e3f4-4f5b-a69a-008502f5542e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.731736] env[69994]: DEBUG oslo_vmware.api [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926431, 'name': ResetVM_Task, 'duration_secs': 0.086878} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.731736] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Did hard reboot of VM {{(pid=69994) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1221.731736] env[69994]: DEBUG nova.compute.manager [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1221.731736] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5701d8-be5b-42d1-a07e-f0b88b7c5874 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.732126] env[69994]: DEBUG oslo_vmware.api [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1221.732126] env[69994]: value = "task-2926432" [ 1221.732126] env[69994]: _type = "Task" [ 1221.732126] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.744219] env[69994]: DEBUG oslo_vmware.api [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926432, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.149015] env[69994]: INFO nova.compute.manager [-] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Took 1.71 seconds to deallocate network for instance. [ 1222.244749] env[69994]: DEBUG oslo_vmware.api [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926432, 'name': PowerOffVM_Task, 'duration_secs': 0.172366} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.246843] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1222.247028] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1222.247499] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04a2ff7c-bf88-44aa-b44d-ac618ae4d6f3 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.466s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.248282] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee8fb4d4-e865-46a1-9edd-172a7a950310 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.310465] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1222.310465] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1222.310465] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleting the datastore file [datastore1] fe716314-1b5d-4b05-b34d-dfd444ed0c8d {{(pid=69994) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.310606] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-18e33b33-f365-4127-874a-ecad0d5cdce6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.317765] env[69994]: DEBUG oslo_vmware.api [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for the task: (returnval){ [ 1222.317765] env[69994]: value = "task-2926434" [ 1222.317765] env[69994]: _type = "Task" [ 1222.317765] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.321472] env[69994]: DEBUG nova.network.neutron [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance_info_cache with network_info: [{"id": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "address": "fa:16:3e:d5:49:79", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb02b823a-ee", "ovs_interfaceid": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.327305] env[69994]: DEBUG oslo_vmware.api [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926434, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.656890] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.657223] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.657454] env[69994]: DEBUG nova.objects.instance [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lazy-loading 'resources' on Instance uuid 60f6d502-0fef-4764-8c1f-1b1d5ab3db41 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1222.824351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1222.835546] env[69994]: DEBUG oslo_vmware.api [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Task: {'id': task-2926434, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13903} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.835864] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1222.836071] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1222.836269] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1222.836445] env[69994]: INFO nova.compute.manager [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1222.836720] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1222.836922] env[69994]: DEBUG nova.compute.manager [-] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1222.837029] env[69994]: DEBUG nova.network.neutron [-] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1223.336147] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060ed7ce-d5f7-4b13-953f-184cd09aff45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.343546] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f89bc4-9731-4b3e-9bcc-0a904f5ab80e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.349512] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc43d2c-1204-4870-a0c6-5ae3e2b97b93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.363250] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e7dc51-bcc3-4d3c-9076-7064ab49f186 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.402860] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4408c50f-4594-416a-ae56-97d00ad6d614 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.410468] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a24acc-75cf-4349-88d8-0548ae3e672c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.424133] env[69994]: DEBUG nova.compute.provider_tree [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1223.455018] env[69994]: DEBUG nova.compute.manager [req-58e35f23-6531-464e-be8a-de71ce5df882 req-61366585-90d3-4c20-a2f7-185f9e3984a2 service nova] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Received event network-vif-deleted-39e82227-f1d4-4f42-8137-5212b739413c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1223.455644] env[69994]: INFO nova.compute.manager [req-58e35f23-6531-464e-be8a-de71ce5df882 req-61366585-90d3-4c20-a2f7-185f9e3984a2 service nova] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Neutron deleted interface 39e82227-f1d4-4f42-8137-5212b739413c; 
detaching it from the instance and deleting it from the info cache [ 1223.455644] env[69994]: DEBUG nova.network.neutron [req-58e35f23-6531-464e-be8a-de71ce5df882 req-61366585-90d3-4c20-a2f7-185f9e3984a2 service nova] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.929877] env[69994]: DEBUG nova.scheduler.client.report [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1223.934407] env[69994]: DEBUG nova.network.neutron [-] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.958183] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-65cec77a-8112-4a68-b419-8931fda14684 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.973051] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d879508e-a9e4-4dc4-8120-515f60e400fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.008421] env[69994]: DEBUG nova.compute.manager [req-58e35f23-6531-464e-be8a-de71ce5df882 req-61366585-90d3-4c20-a2f7-185f9e3984a2 service nova] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Detach interface failed, port_id=39e82227-f1d4-4f42-8137-5212b739413c, reason: Instance fe716314-1b5d-4b05-b34d-dfd444ed0c8d could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1224.437587] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.780s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.441426] env[69994]: INFO nova.compute.manager [-] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Took 1.60 seconds to deallocate network for instance. 
[ 1224.500692] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02f3035-90a2-4f91-8c6d-f3850802afad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.523083] env[69994]: INFO nova.scheduler.client.report [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleted allocations for instance 60f6d502-0fef-4764-8c1f-1b1d5ab3db41 [ 1224.525142] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c25c7d8-54a1-49ff-8b56-c2432525835f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.535026] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance 'a828caf9-2b61-4449-b1ee-25f0828380d1' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1224.948299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.948693] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.948975] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.004770] env[69994]: INFO nova.scheduler.client.report [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Deleted allocations for instance fe716314-1b5d-4b05-b34d-dfd444ed0c8d [ 1225.034028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cba4a82e-5695-40e3-833d-3eeda29bcdc5 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "60f6d502-0fef-4764-8c1f-1b1d5ab3db41" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.218s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.040679] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Powering on the VM {{(pid=69994) power_on_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1225.040988] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95db7e3b-74a2-454d-a48b-854876a0af52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.048320] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1225.048320] env[69994]: value = "task-2926435" [ 1225.048320] env[69994]: _type = "Task" [ 1225.048320] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.056361] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926435, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.514019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-097da06a-49e6-49eb-9e3b-b1dd81f55b07 tempest-DeleteServersTestJSON-409940019 tempest-DeleteServersTestJSON-409940019-project-member] Lock "fe716314-1b5d-4b05-b34d-dfd444ed0c8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.320s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.560463] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926435, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.061028] env[69994]: DEBUG oslo_vmware.api [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926435, 'name': PowerOnVM_Task, 'duration_secs': 0.729878} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.061141] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1226.061537] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0784d1-3bd2-4afb-81f4-63ce883b60af tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance 'a828caf9-2b61-4449-b1ee-25f0828380d1' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1226.416945] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "9d915860-6789-4574-b30f-a7998c07b53e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.417222] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "9d915860-6789-4574-b30f-a7998c07b53e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.847309] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "2ae41965-d345-4358-92bc-7e43d81aca50" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.847617] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "2ae41965-d345-4358-92bc-7e43d81aca50" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.919587] env[69994]: DEBUG nova.compute.manager [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1227.350668] env[69994]: DEBUG nova.compute.manager [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1227.452299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.452601] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.456961] env[69994]: INFO nova.compute.claims [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1227.543582] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "5784a102-fd07-4717-a88b-ac94ad578af6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.543819] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "5784a102-fd07-4717-a88b-ac94ad578af6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.871699] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.046341] env[69994]: DEBUG nova.compute.manager [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1228.563661] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.610052] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a07235-c3af-44e5-b75c-b153dcda20ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.618077] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2551f733-68d7-4452-9d09-f0f2cd340e03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.647333] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59959068-06ab-46fb-8137-fcf0507d839d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.654528] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6b2c2c-0491-44b1-be5c-173171d89559 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.673101] env[69994]: DEBUG nova.compute.provider_tree [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.768248] env[69994]: DEBUG nova.network.neutron [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Port b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1228.768528] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.768680] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.768844] env[69994]: DEBUG nova.network.neutron [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1229.176693] 
env[69994]: DEBUG nova.scheduler.client.report [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1229.528322] env[69994]: DEBUG nova.network.neutron [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance_info_cache with network_info: [{"id": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "address": "fa:16:3e:d5:49:79", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb02b823a-ee", "ovs_interfaceid": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.682438] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.230s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.683016] env[69994]: DEBUG nova.compute.manager [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1229.685887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.814s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.687303] env[69994]: INFO nova.compute.claims [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1230.032049] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1230.191266] env[69994]: DEBUG nova.compute.utils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1230.194414] env[69994]: DEBUG nova.compute.manager [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Not allocating networking since 'none' was specified. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1230.534532] env[69994]: DEBUG nova.compute.manager [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69994) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1230.695283] env[69994]: DEBUG nova.compute.manager [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1230.872321] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f457a3-4d95-4143-8d5e-dc83dcd07256 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.880198] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf14f341-3c91-4aea-8aba-dc99da7063dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.913639] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2eae8a2-8fa2-4923-b35b-d15632de7ea8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.921085] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3fc0aee-bb1a-4adf-9fe6-f374bdb0aa98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.934022] env[69994]: DEBUG nova.compute.provider_tree [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1231.124176] env[69994]: DEBUG oslo_concurrency.lockutils [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "3c374550-d65b-494a-89d7-60720f6b44dc" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.124427] env[69994]: DEBUG oslo_concurrency.lockutils [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "3c374550-d65b-494a-89d7-60720f6b44dc" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.439229] env[69994]: DEBUG nova.scheduler.client.report [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1231.627775] env[69994]: INFO nova.compute.manager [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Detaching volume a387ddfa-4996-4758-be71-d088f121096d [ 1231.663899] env[69994]: INFO 
nova.virt.block_device [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Attempting to driver detach volume a387ddfa-4996-4758-be71-d088f121096d from mountpoint /dev/sdb [ 1231.664179] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1231.664396] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587642', 'volume_id': 'a387ddfa-4996-4758-be71-d088f121096d', 'name': 'volume-a387ddfa-4996-4758-be71-d088f121096d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '3c374550-d65b-494a-89d7-60720f6b44dc', 'attached_at': '', 'detached_at': '', 'volume_id': 'a387ddfa-4996-4758-be71-d088f121096d', 'serial': 'a387ddfa-4996-4758-be71-d088f121096d'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1231.665328] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a3b6c6-7f23-42e6-a914-7bdc799974d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.691030] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e3260f-42fc-43db-8e7a-169948166b0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.697913] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d3f202-d887-49cd-bef3-d9e9f0df6edd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.718535] env[69994]: DEBUG nova.compute.manager [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1231.720724] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.722171] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237712cf-09bc-4404-8966-c359d7ae4bef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.741068] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] The volume has not been displaced from its original location: [datastore2] volume-a387ddfa-4996-4758-be71-d088f121096d/volume-a387ddfa-4996-4758-be71-d088f121096d.vmdk. No consolidation needed. {{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1231.747189] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Reconfiguring VM instance instance-0000005e to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1231.747487] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8d1bd93-ab16-4395-afd0-485706786299 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.767771] env[69994]: DEBUG oslo_vmware.api [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1231.767771] env[69994]: value = "task-2926437" [ 1231.767771] env[69994]: _type = "Task" [ 1231.767771] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.772993] env[69994]: DEBUG nova.virt.hardware [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1231.773239] env[69994]: DEBUG nova.virt.hardware [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1231.773401] env[69994]: DEBUG nova.virt.hardware [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1231.773583] env[69994]: DEBUG nova.virt.hardware [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1231.773728] env[69994]: DEBUG nova.virt.hardware [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1231.773874] env[69994]: DEBUG nova.virt.hardware [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1231.774090] env[69994]: DEBUG nova.virt.hardware [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1231.774255] env[69994]: DEBUG nova.virt.hardware [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1231.774627] env[69994]: DEBUG nova.virt.hardware [None 
req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1231.774874] env[69994]: DEBUG nova.virt.hardware [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1231.775085] env[69994]: DEBUG nova.virt.hardware [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1231.775840] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf1da6e-42cf-4c60-a336-f940ac341624 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.783489] env[69994]: DEBUG oslo_vmware.api [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926437, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.786854] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3d5d16-5693-499c-8043-b25302eee176 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.802638] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1231.808356] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Creating folder: Project (ebec749637974a65a509d39c709c06d2). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1231.808728] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce8f1383-04f6-4169-ab55-b975b15ced39 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.817783] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Created folder: Project (ebec749637974a65a509d39c709c06d2) in parent group-v587342. [ 1231.819785] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Creating folder: Instances. Parent ref: group-v587653. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1231.819785] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac8de092-5bdb-4312-85ca-98eba1ec851f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.826574] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Created folder: Instances in parent group-v587653. [ 1231.826574] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1231.826574] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1231.826749] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca885829-46e5-4b5a-8360-77b462046625 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.842336] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1231.842336] env[69994]: value = "task-2926440" [ 1231.842336] env[69994]: _type = "Task" [ 1231.842336] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.850162] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926440, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.946675] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.260s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.946861] env[69994]: DEBUG nova.compute.manager [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1231.949520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.386s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.950901] env[69994]: INFO nova.compute.claims [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1232.277764] env[69994]: DEBUG oslo_vmware.api [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926437, 'name': ReconfigVM_Task, 'duration_secs': 0.246114} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.278194] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Reconfigured VM instance instance-0000005e to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1232.282832] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bea8a4b6-19b0-4ea3-b8c5-4aec4dcd2c22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.297284] env[69994]: DEBUG oslo_vmware.api [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1232.297284] env[69994]: value = "task-2926441" [ 1232.297284] env[69994]: _type = "Task" [ 1232.297284] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.306136] env[69994]: DEBUG oslo_vmware.api [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926441, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.354877] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926440, 'name': CreateVM_Task, 'duration_secs': 0.295172} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.355073] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1232.355490] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.355659] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1232.356223] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1232.356314] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-107d985a-de18-44dd-8d7d-9f25a4207f91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.361203] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1232.361203] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5206922b-6e41-0df4-c267-9133fd45833d" [ 1232.361203] env[69994]: _type = "Task" [ 1232.361203] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.368891] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5206922b-6e41-0df4-c267-9133fd45833d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.414553] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "7963eb9f-66a1-417b-928b-3b5cef7847be" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.414791] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.455922] env[69994]: DEBUG nova.compute.utils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1232.458492] env[69994]: DEBUG nova.compute.manager [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Not allocating networking since 'none' was specified. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1232.807836] env[69994]: DEBUG oslo_vmware.api [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926441, 'name': ReconfigVM_Task, 'duration_secs': 0.147061} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.808163] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587642', 'volume_id': 'a387ddfa-4996-4758-be71-d088f121096d', 'name': 'volume-a387ddfa-4996-4758-be71-d088f121096d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '3c374550-d65b-494a-89d7-60720f6b44dc', 'attached_at': '', 'detached_at': '', 'volume_id': 'a387ddfa-4996-4758-be71-d088f121096d', 'serial': 'a387ddfa-4996-4758-be71-d088f121096d'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1232.872464] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5206922b-6e41-0df4-c267-9133fd45833d, 'name': SearchDatastore_Task, 'duration_secs': 0.012113} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.872808] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.873089] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1232.873384] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.873491] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1232.873671] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1232.873926] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-106bdcce-af10-4708-89d4-91ecb23a39c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.882818] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1232.883017] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1232.883722] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e12f9722-1c3a-4c3b-9427-bd9429310e69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.888493] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1232.888493] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a85ce2-1191-18de-9dcd-f0409b647864" [ 1232.888493] env[69994]: _type = "Task" [ 1232.888493] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.895808] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a85ce2-1191-18de-9dcd-f0409b647864, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.917940] env[69994]: DEBUG nova.compute.utils [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1232.959812] env[69994]: DEBUG nova.compute.manager [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1233.127835] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c72b376-04d1-4526-80f5-a2cd463f0423 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.135336] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe29277d-5d0a-4468-87a5-3597983ff2ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.166035] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e510555-5434-4ba4-add4-eb7c1a951d33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.173378] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa0bdcd-41aa-48a5-9171-b62b5d583ea1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.187113] env[69994]: DEBUG nova.compute.provider_tree [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1233.357279] env[69994]: DEBUG nova.objects.instance [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lazy-loading 'flavor' on Instance uuid 3c374550-d65b-494a-89d7-60720f6b44dc {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1233.399483] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a85ce2-1191-18de-9dcd-f0409b647864, 'name': SearchDatastore_Task, 'duration_secs': 0.008455} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.400573] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52706167-3547-4abd-a6d6-b0d4c8614aa7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.407329] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1233.407329] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520a581f-0752-3287-ba8b-f28aa3441c26" [ 1233.407329] env[69994]: _type = "Task" [ 1233.407329] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.415797] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520a581f-0752-3287-ba8b-f28aa3441c26, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.420745] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.690272] env[69994]: DEBUG nova.scheduler.client.report [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1233.919293] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520a581f-0752-3287-ba8b-f28aa3441c26, 'name': SearchDatastore_Task, 'duration_secs': 0.009646} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.919293] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1233.919293] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 9d915860-6789-4574-b30f-a7998c07b53e/9d915860-6789-4574-b30f-a7998c07b53e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1233.919293] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1b59b23-ecf1-4e8f-acb0-a82749d07b02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.925329] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1233.925329] env[69994]: value = "task-2926442" [ 1233.925329] env[69994]: _type = "Task" [ 1233.925329] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.933532] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926442, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.977314] env[69994]: DEBUG nova.compute.manager [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1234.038957] env[69994]: DEBUG nova.virt.hardware [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1234.039380] env[69994]: DEBUG nova.virt.hardware [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1234.039663] env[69994]: DEBUG nova.virt.hardware [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1234.040009] env[69994]: DEBUG nova.virt.hardware [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1234.040310] env[69994]: DEBUG nova.virt.hardware [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1234.040588] env[69994]: DEBUG nova.virt.hardware [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1234.040967] env[69994]: DEBUG nova.virt.hardware [None 
req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1234.041415] env[69994]: DEBUG nova.virt.hardware [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1234.041594] env[69994]: DEBUG nova.virt.hardware [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1234.041899] env[69994]: DEBUG nova.virt.hardware [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1234.042328] env[69994]: DEBUG nova.virt.hardware [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1234.043799] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a8f695-4521-47ce-bf45-b4fb106b2244 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.057910] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f088f1-103d-4ee7-a357-0a12d0a6a7b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.083636] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1234.092191] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1234.092700] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1234.093064] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d92d8a6-71a4-4a44-a60d-559b76577923 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.113174] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1234.113174] env[69994]: value = "task-2926443" [ 1234.113174] env[69994]: _type = "Task" [ 1234.113174] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.124189] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926443, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.195139] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.245s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.195766] env[69994]: DEBUG nova.compute.manager [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1234.198555] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 2.478s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.365097] env[69994]: DEBUG oslo_concurrency.lockutils [None req-74f0aca9-be27-4b12-9e11-165bcd5b2f11 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "3c374550-d65b-494a-89d7-60720f6b44dc" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.240s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.441118] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926442, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.422103} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.441118] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 9d915860-6789-4574-b30f-a7998c07b53e/9d915860-6789-4574-b30f-a7998c07b53e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1234.441118] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1234.441118] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3cad3117-ddce-4ae5-9856-ca9fbabc5650 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.445856] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1234.445856] env[69994]: value = "task-2926444" [ 1234.445856] env[69994]: _type = "Task" [ 1234.445856] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.453912] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926444, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.527800] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "7963eb9f-66a1-417b-928b-3b5cef7847be" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.528098] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.528359] env[69994]: INFO nova.compute.manager [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Attaching volume 52e8b64c-8951-4235-aa9b-b1489343c780 to /dev/sdb [ 1234.561345] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e41bacf-1445-467b-b2e1-d540fd7219f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.568824] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea20f47-39d9-4d93-9e7e-884316b710dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.582072] env[69994]: DEBUG nova.virt.block_device [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Updating existing volume attachment record: 367d8cd3-1663-4e7d-bad9-87925c82a2f2 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1234.623667] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926443, 'name': CreateVM_Task, 'duration_secs': 0.399478} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.623855] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1234.624301] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.624464] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1234.624785] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1234.625048] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c97f8f4d-6f6d-44ac-a32a-f4af7deb794e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.630683] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1234.630683] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521f1de5-3981-8235-eca1-b59dbe4c477e" [ 1234.630683] env[69994]: _type = "Task" [ 1234.630683] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.638306] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521f1de5-3981-8235-eca1-b59dbe4c477e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.701411] env[69994]: DEBUG nova.compute.utils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1234.703284] env[69994]: DEBUG nova.objects.instance [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'migration_context' on Instance uuid a828caf9-2b61-4449-b1ee-25f0828380d1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1234.704765] env[69994]: DEBUG nova.compute.manager [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1234.704909] env[69994]: DEBUG nova.network.neutron [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1234.764032] env[69994]: DEBUG nova.policy [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6bcafd04d09f45fab9d573d11d01dfbf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c324e22a0046460b9ad3ad8578f7ef6f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1234.962030] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926444, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077231} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.964270] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1234.965091] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cab6237-8858-47c8-8cc8-d8e964c8b2e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.714981] env[69994]: DEBUG nova.compute.manager [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1235.719455] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "3c374550-d65b-494a-89d7-60720f6b44dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.719455] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "3c374550-d65b-494a-89d7-60720f6b44dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.719455] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "3c374550-d65b-494a-89d7-60720f6b44dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.719455] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "3c374550-d65b-494a-89d7-60720f6b44dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.719455] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "3c374550-d65b-494a-89d7-60720f6b44dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.727135] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 9d915860-6789-4574-b30f-a7998c07b53e/9d915860-6789-4574-b30f-a7998c07b53e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1235.730011] env[69994]: DEBUG nova.network.neutron [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Successfully created port: 5881a64a-b640-4414-b3cd-35a42d39632b {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1235.730434] env[69994]: INFO nova.compute.manager [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Terminating instance [ 1235.737562] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f53cd13-3e0a-45ac-82aa-a6fc2128072f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.753780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "966e3672-f85b-467d-8821-1e14533ee629" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.753780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "966e3672-f85b-467d-8821-1e14533ee629" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.765129] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]521f1de5-3981-8235-eca1-b59dbe4c477e, 'name': SearchDatastore_Task, 'duration_secs': 0.008901} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.765129] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.765372] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1235.765462] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.765559] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.765690] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1235.766017] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1235.766017] env[69994]: value = "task-2926446" [ 1235.766017] env[69994]: _type = "Task" [ 1235.766017] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.766390] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-918a517f-5674-4020-90ed-39f341f5236c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.777460] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926446, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.778586] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1235.778832] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1235.779558] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38cbc83b-bc8e-49c7-a909-0ec348790cb5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.786027] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1235.786027] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d99978-5c75-648a-6c39-4490fe63ba95" [ 1235.786027] env[69994]: _type = "Task" [ 1235.786027] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.794308] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d99978-5c75-648a-6c39-4490fe63ba95, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.936034] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e229502a-6ac5-4121-95bc-8e3cf365c4f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.943960] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d32346-5a71-4a67-8228-8c5483691b02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.975014] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77922b62-615a-4070-8253-1ab58cc4bc6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.982469] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26aa4c28-69fc-4d8c-b491-ac01b008c1cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.995755] env[69994]: DEBUG nova.compute.provider_tree [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1236.256163] env[69994]: DEBUG nova.compute.manager [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1236.261052] env[69994]: DEBUG nova.compute.manager [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1236.261052] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1236.261052] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c204bfc-e7c1-4d18-9844-5b5dedebd5d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.268188] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1236.268473] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64f00c3b-4295-4cbb-8972-6433f0195074 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.277777] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926446, 'name': ReconfigVM_Task, 'duration_secs': 0.241633} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.278945] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 9d915860-6789-4574-b30f-a7998c07b53e/9d915860-6789-4574-b30f-a7998c07b53e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1236.279618] env[69994]: DEBUG oslo_vmware.api [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1236.279618] env[69994]: value = "task-2926447" [ 1236.279618] env[69994]: _type = "Task" [ 1236.279618] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.279792] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ecf8a93c-d658-4c43-a095-c3e3ee23a7fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.292666] env[69994]: DEBUG oslo_vmware.api [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926447, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.292921] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1236.292921] env[69994]: value = "task-2926448" [ 1236.292921] env[69994]: _type = "Task" [ 1236.292921] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.299506] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d99978-5c75-648a-6c39-4490fe63ba95, 'name': SearchDatastore_Task, 'duration_secs': 0.010503} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.301610] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3399fbf-806a-4735-b0ce-b1219c5449ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.306901] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926448, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.310018] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1236.310018] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529f4da5-02a2-6e2c-304f-7646451afb47" [ 1236.310018] env[69994]: _type = "Task" [ 1236.310018] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.318044] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529f4da5-02a2-6e2c-304f-7646451afb47, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.499021] env[69994]: DEBUG nova.scheduler.client.report [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1236.736113] env[69994]: DEBUG nova.compute.manager [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1236.764622] env[69994]: DEBUG nova.virt.hardware [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1236.764866] env[69994]: DEBUG nova.virt.hardware [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1236.765034] env[69994]: DEBUG nova.virt.hardware [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1236.765220] env[69994]: DEBUG nova.virt.hardware [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1236.765370] env[69994]: DEBUG nova.virt.hardware [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} 
[ 1236.765513] env[69994]: DEBUG nova.virt.hardware [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1236.765712] env[69994]: DEBUG nova.virt.hardware [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1236.765869] env[69994]: DEBUG nova.virt.hardware [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1236.766050] env[69994]: DEBUG nova.virt.hardware [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1236.766261] env[69994]: DEBUG nova.virt.hardware [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1236.766381] env[69994]: DEBUG nova.virt.hardware [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1236.767272] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506a521a-7beb-4f55-8941-8e42230265a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.778034] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fe0972-5e07-4fb8-98a3-c6c559ac5e96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.788560] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.800129] env[69994]: DEBUG oslo_vmware.api [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926447, 'name': PowerOffVM_Task, 'duration_secs': 0.196042} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.800437] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1236.800639] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1236.801246] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bc5fd39c-33d2-4815-bbf1-5f9820dc6736 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.805291] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926448, 'name': Rename_Task, 'duration_secs': 0.170878} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.805817] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1236.806055] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cee4e2c3-d1ef-4be7-85bd-5fff2ed32058 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.814098] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1236.814098] env[69994]: value = "task-2926451" [ 1236.814098] env[69994]: _type = "Task" [ 1236.814098] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.820583] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529f4da5-02a2-6e2c-304f-7646451afb47, 'name': SearchDatastore_Task, 'duration_secs': 0.010205} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.823331] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.823613] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 2ae41965-d345-4358-92bc-7e43d81aca50/2ae41965-d345-4358-92bc-7e43d81aca50.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1236.823862] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926451, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.824084] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02e04504-2db6-4c78-b573-f0a23aa9542f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.829194] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1236.829194] env[69994]: value = "task-2926452" [ 1236.829194] env[69994]: _type = "Task" [ 1236.829194] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.838757] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926452, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.868047] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1236.868314] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1236.868514] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleting the datastore file [datastore1] 3c374550-d65b-494a-89d7-60720f6b44dc {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1236.868780] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6dcb96bd-5a26-4148-afb1-d2bc7575b536 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.874674] env[69994]: DEBUG oslo_vmware.api [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1236.874674] env[69994]: value = "task-2926453" [ 1236.874674] env[69994]: _type = "Task" [ 1236.874674] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.882690] env[69994]: DEBUG oslo_vmware.api [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926453, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.297842] env[69994]: DEBUG nova.compute.manager [req-e8c84988-1851-458b-bf23-5499d99eb7cb req-eed324a3-d0d3-42d6-ba6a-ab3ee470d042 service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Received event network-vif-plugged-5881a64a-b640-4414-b3cd-35a42d39632b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1237.298122] env[69994]: DEBUG oslo_concurrency.lockutils [req-e8c84988-1851-458b-bf23-5499d99eb7cb req-eed324a3-d0d3-42d6-ba6a-ab3ee470d042 service nova] Acquiring lock "5784a102-fd07-4717-a88b-ac94ad578af6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.298304] env[69994]: DEBUG oslo_concurrency.lockutils [req-e8c84988-1851-458b-bf23-5499d99eb7cb req-eed324a3-d0d3-42d6-ba6a-ab3ee470d042 service nova] Lock "5784a102-fd07-4717-a88b-ac94ad578af6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.298477] env[69994]: DEBUG oslo_concurrency.lockutils [req-e8c84988-1851-458b-bf23-5499d99eb7cb req-eed324a3-d0d3-42d6-ba6a-ab3ee470d042 service nova] Lock "5784a102-fd07-4717-a88b-ac94ad578af6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.298650] env[69994]: DEBUG nova.compute.manager [req-e8c84988-1851-458b-bf23-5499d99eb7cb req-eed324a3-d0d3-42d6-ba6a-ab3ee470d042 service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] No waiting events found dispatching network-vif-plugged-5881a64a-b640-4414-b3cd-35a42d39632b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1237.298814] env[69994]: WARNING nova.compute.manager [req-e8c84988-1851-458b-bf23-5499d99eb7cb req-eed324a3-d0d3-42d6-ba6a-ab3ee470d042 service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Received unexpected event network-vif-plugged-5881a64a-b640-4414-b3cd-35a42d39632b for instance with vm_state building and task_state spawning. [ 1237.326089] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926451, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.340462] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926452, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.351601] env[69994]: DEBUG nova.network.neutron [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Successfully updated port: 5881a64a-b640-4414-b3cd-35a42d39632b {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1237.386916] env[69994]: DEBUG oslo_vmware.api [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.419194} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.387296] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1237.387570] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1237.387846] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1237.388123] env[69994]: INFO nova.compute.manager [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1237.388452] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1237.388709] env[69994]: DEBUG nova.compute.manager [-] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1237.388865] env[69994]: DEBUG nova.network.neutron [-] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1237.510543] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.311s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.517048] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.730s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.518666] env[69994]: INFO nova.compute.claims [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1237.825524] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926451, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.840549] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926452, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.759753} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.840969] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 2ae41965-d345-4358-92bc-7e43d81aca50/2ae41965-d345-4358-92bc-7e43d81aca50.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1237.841338] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1237.841734] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-494ab620-ed8c-4f2e-bb86-7541926ff13e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.852832] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1237.852832] env[69994]: value = "task-2926454" [ 1237.852832] env[69994]: _type = "Task" [ 1237.852832] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.853222] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "refresh_cache-5784a102-fd07-4717-a88b-ac94ad578af6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.853222] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "refresh_cache-5784a102-fd07-4717-a88b-ac94ad578af6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.853443] env[69994]: DEBUG nova.network.neutron [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1237.862112] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926454, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.325245] env[69994]: DEBUG oslo_vmware.api [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926451, 'name': PowerOnVM_Task, 'duration_secs': 1.194214} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.325484] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1238.325659] env[69994]: INFO nova.compute.manager [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Took 6.61 seconds to spawn the instance on the hypervisor. [ 1238.325836] env[69994]: DEBUG nova.compute.manager [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1238.326894] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4aa372-f70f-4404-91ed-e0079f25de1b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.360267] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926454, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.190126} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.360546] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1238.361690] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0e626f-155a-46cb-b7e2-ab80bbf9b594 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.381340] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 2ae41965-d345-4358-92bc-7e43d81aca50/2ae41965-d345-4358-92bc-7e43d81aca50.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1238.382302] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b66c9f8-81f0-479c-b55a-b587a257c692 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.396574] env[69994]: DEBUG nova.network.neutron [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1238.404278] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1238.404278] env[69994]: value = "task-2926455" [ 1238.404278] env[69994]: _type = "Task" [ 1238.404278] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.412402] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926455, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.442731] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1238.442976] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.525819] env[69994]: DEBUG nova.network.neutron [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Updating instance_info_cache with network_info: [{"id": "5881a64a-b640-4414-b3cd-35a42d39632b", "address": "fa:16:3e:f6:ce:27", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5881a64a-b6", "ovs_interfaceid": "5881a64a-b640-4414-b3cd-35a42d39632b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.537526] env[69994]: DEBUG nova.network.neutron [-] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Updating instance_info_cache with network_info: [] {{(pid=69994) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.695770] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9c11fd-2e73-4e3d-9711-a209f63b9844 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.704479] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5127251c-1b0a-4cac-8439-d8459e8541cc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.735772] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ea9df7-15bf-41e5-bec8-1d4565ca0d06 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.743905] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f7ef60-eade-4ac8-afa6-c7b52e6cc818 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.757337] env[69994]: DEBUG nova.compute.provider_tree [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1238.842247] env[69994]: INFO nova.compute.manager [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Took 11.42 seconds to build instance. [ 1238.913239] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926455, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.945728] env[69994]: DEBUG nova.compute.utils [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1239.029021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "refresh_cache-5784a102-fd07-4717-a88b-ac94ad578af6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.029021] env[69994]: DEBUG nova.compute.manager [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Instance network_info: |[{"id": "5881a64a-b640-4414-b3cd-35a42d39632b", "address": "fa:16:3e:f6:ce:27", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5881a64a-b6", "ovs_interfaceid": "5881a64a-b640-4414-b3cd-35a42d39632b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1239.029344] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:ce:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5881a64a-b640-4414-b3cd-35a42d39632b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1239.037233] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1239.037465] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1239.037695] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb2fd086-0bcd-43fe-9d83-8e089e2b140b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.054458] env[69994]: INFO nova.compute.manager [-] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Took 1.67 seconds to deallocate network for instance. [ 1239.067442] env[69994]: INFO nova.compute.manager [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Swapping old allocation on dict_keys(['2173cd1f-90eb-4aab-b51d-83c140d1a7be']) held by migration 24848846-dcfd-4ba5-a1c5-4836e1411fd8 for instance [ 1239.070656] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1239.070656] env[69994]: value = "task-2926456" [ 1239.070656] env[69994]: _type = "Task" [ 1239.070656] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.079236] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926456, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.091354] env[69994]: DEBUG nova.scheduler.client.report [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Overwriting current allocation {'allocations': {'2173cd1f-90eb-4aab-b51d-83c140d1a7be': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 169}}, 'project_id': 'c545eb835008401ab8672be30dbcdad9', 'user_id': 'b8a8bcbbe1454049982f693dbfa19790', 'consumer_generation': 1} on consumer a828caf9-2b61-4449-b1ee-25f0828380d1 {{(pid=69994) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1239.132306] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1239.132631] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587657', 'volume_id': '52e8b64c-8951-4235-aa9b-b1489343c780', 'name': 'volume-52e8b64c-8951-4235-aa9b-b1489343c780', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7963eb9f-66a1-417b-928b-3b5cef7847be', 'attached_at': '', 'detached_at': '', 'volume_id': '52e8b64c-8951-4235-aa9b-b1489343c780', 'serial': '52e8b64c-8951-4235-aa9b-b1489343c780'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1239.133517] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f402865f-1fda-475f-9230-dd4936f0cadd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.150788] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0132898-45c4-4c0f-bafc-87e015e4608a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.180186] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] volume-52e8b64c-8951-4235-aa9b-b1489343c780/volume-52e8b64c-8951-4235-aa9b-b1489343c780.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1239.180515] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e8066ce-2a3a-40e1-93ea-b98ff0598d5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.194103] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.194287] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.194467] env[69994]: DEBUG nova.network.neutron [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1239.201026] env[69994]: DEBUG oslo_vmware.api [None 
req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1239.201026] env[69994]: value = "task-2926457" [ 1239.201026] env[69994]: _type = "Task" [ 1239.201026] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.209798] env[69994]: DEBUG oslo_vmware.api [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926457, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.260224] env[69994]: DEBUG nova.scheduler.client.report [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1239.341647] env[69994]: DEBUG nova.compute.manager [req-1e47cbb0-fcfc-47b2-8319-27b97913c693 req-0b7b4c9a-852a-4b2d-afc5-127fff72a381 service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Received event network-changed-5881a64a-b640-4414-b3cd-35a42d39632b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1239.341803] env[69994]: DEBUG nova.compute.manager [req-1e47cbb0-fcfc-47b2-8319-27b97913c693 req-0b7b4c9a-852a-4b2d-afc5-127fff72a381 service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Refreshing instance network info cache due to event network-changed-5881a64a-b640-4414-b3cd-35a42d39632b. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1239.342081] env[69994]: DEBUG oslo_concurrency.lockutils [req-1e47cbb0-fcfc-47b2-8319-27b97913c693 req-0b7b4c9a-852a-4b2d-afc5-127fff72a381 service nova] Acquiring lock "refresh_cache-5784a102-fd07-4717-a88b-ac94ad578af6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.342164] env[69994]: DEBUG oslo_concurrency.lockutils [req-1e47cbb0-fcfc-47b2-8319-27b97913c693 req-0b7b4c9a-852a-4b2d-afc5-127fff72a381 service nova] Acquired lock "refresh_cache-5784a102-fd07-4717-a88b-ac94ad578af6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.342332] env[69994]: DEBUG nova.network.neutron [req-1e47cbb0-fcfc-47b2-8319-27b97913c693 req-0b7b4c9a-852a-4b2d-afc5-127fff72a381 service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Refreshing network info cache for port 5881a64a-b640-4414-b3cd-35a42d39632b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1239.343795] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c268b571-24d8-4d6c-b0d2-97e1dcd6fdfd tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "9d915860-6789-4574-b30f-a7998c07b53e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.927s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.414033] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926455, 'name': ReconfigVM_Task, 'duration_secs': 0.790049} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.414208] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 2ae41965-d345-4358-92bc-7e43d81aca50/2ae41965-d345-4358-92bc-7e43d81aca50.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1239.414808] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b0e6a06-fe46-4e55-a21f-75698697e683 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.420524] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1239.420524] env[69994]: value = "task-2926458" [ 1239.420524] env[69994]: _type = "Task" [ 1239.420524] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.427761] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926458, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.449675] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.564703] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1239.579883] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926456, 'name': CreateVM_Task, 'duration_secs': 0.357252} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.580091] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1239.587181] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.587359] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.587694] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1239.587975] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82c84210-0de5-4da7-b819-687b73cca37b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.593028] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1239.593028] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c86e7e-ec74-44d8-c7c2-147b7932869c" [ 1239.593028] env[69994]: _type = "Task" [ 1239.593028] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.604498] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c86e7e-ec74-44d8-c7c2-147b7932869c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.710522] env[69994]: DEBUG oslo_vmware.api [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926457, 'name': ReconfigVM_Task, 'duration_secs': 0.354443} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.710799] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Reconfigured VM instance instance-00000070 to attach disk [datastore1] volume-52e8b64c-8951-4235-aa9b-b1489343c780/volume-52e8b64c-8951-4235-aa9b-b1489343c780.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1239.715643] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06a8443d-44b6-47c4-a66e-fae70afb72b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.732549] env[69994]: DEBUG oslo_vmware.api [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1239.732549] env[69994]: value = "task-2926459" [ 1239.732549] env[69994]: _type = "Task" [ 1239.732549] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.740834] env[69994]: DEBUG oslo_vmware.api [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926459, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.765323] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.248s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.765948] env[69994]: DEBUG nova.compute.manager [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1239.769029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.204s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1239.771613] env[69994]: DEBUG nova.objects.instance [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lazy-loading 'resources' on Instance uuid 3c374550-d65b-494a-89d7-60720f6b44dc {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.937698] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926458, 'name': Rename_Task, 'duration_secs': 0.215883} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.938015] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1239.938256] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73aeffe1-1583-4ad9-af8b-e3eebad7b661 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.944415] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1239.944415] env[69994]: value = "task-2926460" [ 1239.944415] env[69994]: _type = "Task" [ 1239.944415] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.954476] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926460, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.993094] env[69994]: DEBUG nova.network.neutron [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance_info_cache with network_info: [{"id": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "address": "fa:16:3e:d5:49:79", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb02b823a-ee", "ovs_interfaceid": "b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.049582] env[69994]: DEBUG nova.network.neutron [req-1e47cbb0-fcfc-47b2-8319-27b97913c693 req-0b7b4c9a-852a-4b2d-afc5-127fff72a381 service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Updated VIF entry in instance network info cache for port 5881a64a-b640-4414-b3cd-35a42d39632b. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1240.049929] env[69994]: DEBUG nova.network.neutron [req-1e47cbb0-fcfc-47b2-8319-27b97913c693 req-0b7b4c9a-852a-4b2d-afc5-127fff72a381 service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Updating instance_info_cache with network_info: [{"id": "5881a64a-b640-4414-b3cd-35a42d39632b", "address": "fa:16:3e:f6:ce:27", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5881a64a-b6", "ovs_interfaceid": "5881a64a-b640-4414-b3cd-35a42d39632b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.102887] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c86e7e-ec74-44d8-c7c2-147b7932869c, 'name': SearchDatastore_Task, 'duration_secs': 0.011927} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.103175] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.103402] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1240.103653] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.103803] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1240.103981] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1240.104246] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae6e3ada-13f3-4287-8391-e9de3a7321ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.114554] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1240.114733] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1240.115453] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e30fc457-eb3b-4697-9227-99943608d4da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.120495] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1240.120495] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524d5da2-4f56-5b68-6478-57a1dc4e0b93" [ 1240.120495] env[69994]: _type = "Task" [ 1240.120495] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.128617] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524d5da2-4f56-5b68-6478-57a1dc4e0b93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.243149] env[69994]: DEBUG oslo_vmware.api [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926459, 'name': ReconfigVM_Task, 'duration_secs': 0.143191} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.243485] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587657', 'volume_id': '52e8b64c-8951-4235-aa9b-b1489343c780', 'name': 'volume-52e8b64c-8951-4235-aa9b-b1489343c780', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7963eb9f-66a1-417b-928b-3b5cef7847be', 'attached_at': '', 'detached_at': '', 'volume_id': '52e8b64c-8951-4235-aa9b-b1489343c780', 'serial': '52e8b64c-8951-4235-aa9b-b1489343c780'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1240.274740] env[69994]: DEBUG nova.compute.utils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1240.278886] env[69994]: DEBUG nova.compute.manager [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1240.279075] env[69994]: DEBUG nova.network.neutron [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1240.317965] env[69994]: DEBUG nova.policy [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06c03724e54f489c89ac5068010cf291', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '659795e8bd77484fa20f48d704d113a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1240.415380] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ecaae1-1e49-4b9b-9708-0749a394fcae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.423312] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d1fa96-4653-4bdb-9905-e5337710c2b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.456279] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74f98a2-2cd4-49ec-929f-26dfb479db95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.466672] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100116d6-5ca8-4133-8cff-d17622228c80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.470344] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926460, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.479993] env[69994]: DEBUG nova.compute.provider_tree [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1240.495511] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "refresh_cache-a828caf9-2b61-4449-b1ee-25f0828380d1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.496413] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1436935c-6c47-4865-9bd3-945383d1b1e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.503018] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f7e763-80c3-486f-9455-a52bc624e251 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.534780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.535066] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.535322] env[69994]: INFO nova.compute.manager [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Attaching volume c28f1025-5a78-457c-a46f-684978ccb6d9 to /dev/sdb [ 1240.552689] env[69994]: DEBUG oslo_concurrency.lockutils [req-1e47cbb0-fcfc-47b2-8319-27b97913c693 req-0b7b4c9a-852a-4b2d-afc5-127fff72a381 service nova] Releasing lock "refresh_cache-5784a102-fd07-4717-a88b-ac94ad578af6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.553035] env[69994]: DEBUG nova.compute.manager [req-1e47cbb0-fcfc-47b2-8319-27b97913c693 req-0b7b4c9a-852a-4b2d-afc5-127fff72a381 service nova] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Received event network-vif-deleted-da352ba6-e52b-4b13-8514-5db1e4d826ee {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1240.570741] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c1937b-d15d-4d20-afce-6b4f5ed63084 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.576832] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7cf9f0-b12d-4c43-90c7-d558053dfca1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.589842] env[69994]: DEBUG nova.virt.block_device [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Updating existing volume attachment record: 36077ea9-2676-4a3a-a97a-438dc9a3158b {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1240.612380] env[69994]: DEBUG nova.network.neutron [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Successfully created port: 372f3c43-b01a-4ba8-919b-804926d5fceb {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1240.632674] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]524d5da2-4f56-5b68-6478-57a1dc4e0b93, 'name': SearchDatastore_Task, 'duration_secs': 0.010502} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.633496] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35326a38-56be-4bea-b75b-73e3b253beec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.639016] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1240.639016] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5289723c-e842-8b61-e6c6-5fb9433caa85" [ 1240.639016] env[69994]: _type = "Task" [ 1240.639016] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.646507] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5289723c-e842-8b61-e6c6-5fb9433caa85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.780196] env[69994]: DEBUG nova.compute.manager [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1240.961982] env[69994]: DEBUG oslo_vmware.api [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926460, 'name': PowerOnVM_Task, 'duration_secs': 0.848064} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.962361] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1240.962510] env[69994]: INFO nova.compute.manager [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Took 6.98 seconds to spawn the instance on the hypervisor. [ 1240.962735] env[69994]: DEBUG nova.compute.manager [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1240.963562] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980e228f-8df9-4086-94a2-0237d35c722c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.983502] env[69994]: DEBUG nova.scheduler.client.report [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1241.149415] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5289723c-e842-8b61-e6c6-5fb9433caa85, 'name': SearchDatastore_Task, 'duration_secs': 0.011419} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.149671] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1241.149922] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 5784a102-fd07-4717-a88b-ac94ad578af6/5784a102-fd07-4717-a88b-ac94ad578af6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1241.150185] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b223150-1f3e-42ef-9dde-a6e8ac40ced8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.157668] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1241.157668] env[69994]: value = "task-2926462" [ 1241.157668] env[69994]: _type = "Task" [ 1241.157668] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.165634] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926462, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.486760] env[69994]: INFO nova.compute.manager [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Took 13.63 seconds to build instance. 
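The records in this stretch repeat one pattern: nova asks vCenter to start a long-running operation (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task, ...), gets back a task reference, and oslo.vmware then polls it — "Waiting for the task", "progress is N%", "completed successfully" (api.py:397/434/444 above). The snippet below is only an illustrative sketch of that polling loop, not oslo.vmware's actual implementation; get_task_info and the dictionary keys it returns are assumptions standing in for the PropertyCollector round-trips the library really performs.

    import time

    # Illustrative sketch of the poll-until-done pattern seen in this log.
    # get_task_info(task_ref) is a hypothetical callable that returns something
    # like {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
    def wait_for_task(get_task_info, task_ref, poll_interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)
            if info['state'] == 'success':
                # corresponds to the "... completed successfully." records
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(f"task {task_ref} failed: {info.get('error')}")
            # still queued/running: report progress and poll again shortly,
            # like the "progress is N%" records between invocations
            print(f"task {task_ref} progress is {info.get('progress') or 0}%")
            time.sleep(poll_interval)
        raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")

Each attach, power-off, reconfigure and copy operation below goes through this same invoke-then-poll cycle before the next step in the instance lifecycle is logged.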
[ 1241.493387] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.724s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.534735] env[69994]: INFO nova.scheduler.client.report [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleted allocations for instance 3c374550-d65b-494a-89d7-60720f6b44dc [ 1241.586415] env[69994]: DEBUG nova.objects.instance [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lazy-loading 'flavor' on Instance uuid 7963eb9f-66a1-417b-928b-3b5cef7847be {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1241.604358] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1241.604773] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d39c3fc2-4434-495e-b7f0-e482fd181fd5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.612332] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1241.612332] env[69994]: value = "task-2926463" [ 1241.612332] env[69994]: _type = "Task" [ 1241.612332] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.623557] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926463, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.668303] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926462, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463921} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.668447] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 5784a102-fd07-4717-a88b-ac94ad578af6/5784a102-fd07-4717-a88b-ac94ad578af6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1241.668661] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1241.668912] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3933e9c-ee42-4204-b1df-50f1b9ec3c4d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.676484] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1241.676484] env[69994]: value = "task-2926464" [ 1241.676484] env[69994]: _type = "Task" [ 1241.676484] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.684754] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926464, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.791643] env[69994]: DEBUG nova.compute.manager [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1241.826999] env[69994]: DEBUG nova.virt.hardware [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1241.827314] env[69994]: DEBUG nova.virt.hardware [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1241.827494] env[69994]: DEBUG nova.virt.hardware [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1241.827681] env[69994]: DEBUG nova.virt.hardware [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1241.827827] env[69994]: DEBUG nova.virt.hardware [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1241.827988] env[69994]: DEBUG nova.virt.hardware [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1241.828439] env[69994]: DEBUG nova.virt.hardware [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1241.828439] env[69994]: DEBUG nova.virt.hardware [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1241.828575] env[69994]: DEBUG nova.virt.hardware [None 
req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1241.828728] env[69994]: DEBUG nova.virt.hardware [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1241.828905] env[69994]: DEBUG nova.virt.hardware [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1241.829837] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bb6705-2301-45c8-9e99-c8c8d73cd189 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.839058] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c5c098-28a7-4cc8-87be-faf9b199ffe3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.992652] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3ab2e9ae-bb2e-436c-95aa-23a5aed64ab5 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "2ae41965-d345-4358-92bc-7e43d81aca50" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.145s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.000734] env[69994]: INFO nova.compute.manager [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Rebuilding instance [ 1242.045162] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a4a7a465-b503-4122-b8b6-ebe79b1d717b tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "3c374550-d65b-494a-89d7-60720f6b44dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.329s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.054107] env[69994]: DEBUG nova.compute.manager [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1242.054107] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8e3b23-8545-47e3-9298-413ba2352209 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.090017] env[69994]: DEBUG nova.compute.manager [req-490213d9-25d8-4e76-9a27-44a0ca4e18f4 req-b1d41934-d594-4a2c-a1ae-2ca4e9dece37 service nova] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Received event 
network-vif-plugged-372f3c43-b01a-4ba8-919b-804926d5fceb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1242.090017] env[69994]: DEBUG oslo_concurrency.lockutils [req-490213d9-25d8-4e76-9a27-44a0ca4e18f4 req-b1d41934-d594-4a2c-a1ae-2ca4e9dece37 service nova] Acquiring lock "966e3672-f85b-467d-8821-1e14533ee629-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.090017] env[69994]: DEBUG oslo_concurrency.lockutils [req-490213d9-25d8-4e76-9a27-44a0ca4e18f4 req-b1d41934-d594-4a2c-a1ae-2ca4e9dece37 service nova] Lock "966e3672-f85b-467d-8821-1e14533ee629-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.090017] env[69994]: DEBUG oslo_concurrency.lockutils [req-490213d9-25d8-4e76-9a27-44a0ca4e18f4 req-b1d41934-d594-4a2c-a1ae-2ca4e9dece37 service nova] Lock "966e3672-f85b-467d-8821-1e14533ee629-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.090017] env[69994]: DEBUG nova.compute.manager [req-490213d9-25d8-4e76-9a27-44a0ca4e18f4 req-b1d41934-d594-4a2c-a1ae-2ca4e9dece37 service nova] [instance: 966e3672-f85b-467d-8821-1e14533ee629] No waiting events found dispatching network-vif-plugged-372f3c43-b01a-4ba8-919b-804926d5fceb {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1242.090017] env[69994]: WARNING nova.compute.manager [req-490213d9-25d8-4e76-9a27-44a0ca4e18f4 req-b1d41934-d594-4a2c-a1ae-2ca4e9dece37 service nova] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Received unexpected event network-vif-plugged-372f3c43-b01a-4ba8-919b-804926d5fceb for instance with vm_state building and task_state spawning. [ 1242.093730] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a33d8b4-1cc0-4a79-acef-e668005c56d2 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.566s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.122022] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926463, 'name': PowerOffVM_Task, 'duration_secs': 0.269857} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.122412] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1242.123367] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1242.123677] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1242.123900] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1242.124177] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1242.124387] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1242.124604] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1242.124888] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1242.125128] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 
tempest-ServerActionsTestOtherB-1022362737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1242.125369] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1242.125605] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1242.125858] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1242.135993] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-112403df-3797-44fa-8a54-643d181830d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.155762] env[69994]: DEBUG nova.network.neutron [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Successfully updated port: 372f3c43-b01a-4ba8-919b-804926d5fceb {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1242.163686] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1242.163686] env[69994]: value = "task-2926465" [ 1242.163686] env[69994]: _type = "Task" [ 1242.163686] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.172075] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926465, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.186132] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926464, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070844} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.186406] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1242.187187] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe85e54-bb40-4c90-b8f0-82b109d04b20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.191172] env[69994]: DEBUG oslo_concurrency.lockutils [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "7963eb9f-66a1-417b-928b-3b5cef7847be" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.191459] env[69994]: DEBUG oslo_concurrency.lockutils [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.215763] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 5784a102-fd07-4717-a88b-ac94ad578af6/5784a102-fd07-4717-a88b-ac94ad578af6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1242.220023] env[69994]: INFO nova.compute.manager [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Detaching volume 52e8b64c-8951-4235-aa9b-b1489343c780 [ 1242.220023] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efbd802d-d245-4d62-9e41-76af368a3756 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.239650] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1242.239650] env[69994]: value = "task-2926466" [ 1242.239650] env[69994]: _type = "Task" [ 1242.239650] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.247997] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926466, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.262131] env[69994]: INFO nova.virt.block_device [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Attempting to driver detach volume 52e8b64c-8951-4235-aa9b-b1489343c780 from mountpoint /dev/sdb [ 1242.262438] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1242.262651] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587657', 'volume_id': '52e8b64c-8951-4235-aa9b-b1489343c780', 'name': 'volume-52e8b64c-8951-4235-aa9b-b1489343c780', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7963eb9f-66a1-417b-928b-3b5cef7847be', 'attached_at': '', 'detached_at': '', 'volume_id': '52e8b64c-8951-4235-aa9b-b1489343c780', 'serial': '52e8b64c-8951-4235-aa9b-b1489343c780'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1242.263491] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e97c532-6a4f-40c7-82a5-325ed78ea570 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.283844] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089ed3de-cfa5-48d1-bd83-86ed0ecdfd43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.290366] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a7f11a-1e58-4c1a-bd0a-7e5239c9c943 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.310382] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbade6f9-42b8-49eb-a400-a79068948fc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.324264] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] The volume has not been displaced from its original location: [datastore1] volume-52e8b64c-8951-4235-aa9b-b1489343c780/volume-52e8b64c-8951-4235-aa9b-b1489343c780.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1242.329411] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Reconfiguring VM instance instance-00000070 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1242.329701] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a309fbaf-b852-47b1-b30f-b7a6dadc52aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.347124] env[69994]: DEBUG oslo_vmware.api [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1242.347124] env[69994]: value = "task-2926467" [ 1242.347124] env[69994]: _type = "Task" [ 1242.347124] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.354224] env[69994]: DEBUG oslo_vmware.api [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926467, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.659182] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "refresh_cache-966e3672-f85b-467d-8821-1e14533ee629" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.659365] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquired lock "refresh_cache-966e3672-f85b-467d-8821-1e14533ee629" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.659531] env[69994]: DEBUG nova.network.neutron [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1242.673228] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926465, 'name': ReconfigVM_Task, 'duration_secs': 0.453451} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.674141] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582461e6-b7b1-46a9-8f44-2218d1cb0194 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.697247] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1242.697505] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1242.697696] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1242.697888] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1242.698090] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1242.698188] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1242.698389] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1242.698543] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1242.698864] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1242.699046] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1242.699253] env[69994]: DEBUG nova.virt.hardware [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1242.700046] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df238802-7cd9-4eb7-863f-2009c0bef664 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.706360] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1242.706360] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d2395a-316e-43d2-0974-ecf45565b777" [ 1242.706360] env[69994]: _type = "Task" [ 1242.706360] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.714381] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d2395a-316e-43d2-0974-ecf45565b777, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.748755] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926466, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.857360] env[69994]: DEBUG oslo_vmware.api [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926467, 'name': ReconfigVM_Task, 'duration_secs': 0.221172} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.857634] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Reconfigured VM instance instance-00000070 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1242.862342] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c45629d1-aab6-4f16-8316-54f58bc12337 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.877403] env[69994]: DEBUG oslo_vmware.api [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1242.877403] env[69994]: value = "task-2926469" [ 1242.877403] env[69994]: _type = "Task" [ 1242.877403] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.885059] env[69994]: DEBUG oslo_vmware.api [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926469, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.065790] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1243.066144] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f5d04a9-95a5-4af7-a5f8-312b95f8cce2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.072685] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1243.072685] env[69994]: value = "task-2926470" [ 1243.072685] env[69994]: _type = "Task" [ 1243.072685] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.081266] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926470, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.191576] env[69994]: DEBUG nova.network.neutron [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1243.218814] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d2395a-316e-43d2-0974-ecf45565b777, 'name': SearchDatastore_Task, 'duration_secs': 0.018274} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.224104] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1243.224384] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4fa46f5-302b-4bca-a9c6-c380e052f65e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.245049] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1243.245049] env[69994]: value = "task-2926471" [ 1243.245049] env[69994]: _type = "Task" [ 1243.245049] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.250856] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926466, 'name': ReconfigVM_Task, 'duration_secs': 0.956903} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.251336] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 5784a102-fd07-4717-a88b-ac94ad578af6/5784a102-fd07-4717-a88b-ac94ad578af6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1243.251954] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96832885-9865-4d3b-9e2a-748c43ada528 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.256518] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926471, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.263802] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1243.263802] env[69994]: value = "task-2926472" [ 1243.263802] env[69994]: _type = "Task" [ 1243.263802] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.272968] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926472, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.367876] env[69994]: DEBUG nova.network.neutron [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Updating instance_info_cache with network_info: [{"id": "372f3c43-b01a-4ba8-919b-804926d5fceb", "address": "fa:16:3e:6d:35:c9", "network": {"id": "47871821-916c-4397-b330-8bdda7ccd6f6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1622921692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "659795e8bd77484fa20f48d704d113a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9297313e-7c50-4873-93d3-67284929163a", "external-id": "nsx-vlan-transportzone-620", "segmentation_id": 620, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap372f3c43-b0", "ovs_interfaceid": "372f3c43-b01a-4ba8-919b-804926d5fceb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.386576] env[69994]: DEBUG oslo_vmware.api [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926469, 'name': ReconfigVM_Task, 'duration_secs': 0.138938} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.390019] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587657', 'volume_id': '52e8b64c-8951-4235-aa9b-b1489343c780', 'name': 'volume-52e8b64c-8951-4235-aa9b-b1489343c780', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7963eb9f-66a1-417b-928b-3b5cef7847be', 'attached_at': '', 'detached_at': '', 'volume_id': '52e8b64c-8951-4235-aa9b-b1489343c780', 'serial': '52e8b64c-8951-4235-aa9b-b1489343c780'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1243.582913] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926470, 'name': PowerOffVM_Task, 'duration_secs': 0.133435} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.583171] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1243.583415] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1243.584171] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb860f2f-349f-4df9-be7e-8866678f5fac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.590706] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1243.590923] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-340b5280-f490-438e-bb8d-684b9bfa052e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.613936] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1243.614180] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] 
Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1243.614375] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Deleting the datastore file [datastore1] 2ae41965-d345-4358-92bc-7e43d81aca50 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1243.614633] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f2b68388-b821-4823-8c9a-a9bf8c097066 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.620236] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1243.620236] env[69994]: value = "task-2926474" [ 1243.620236] env[69994]: _type = "Task" [ 1243.620236] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.628653] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926474, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.754288] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926471, 'name': ReconfigVM_Task, 'duration_secs': 0.233993} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.754564] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1243.755358] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a848ad-29b3-449c-bc79-73ee0ddc7e61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.780265] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] a828caf9-2b61-4449-b1ee-25f0828380d1/a828caf9-2b61-4449-b1ee-25f0828380d1.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1243.783342] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ec3855c-c2a6-4f7b-96d0-f675b081e182 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.801076] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926472, 'name': Rename_Task, 'duration_secs': 0.184008} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.802288] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1243.802592] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1243.802592] env[69994]: value = "task-2926475" [ 1243.802592] env[69994]: _type = "Task" [ 1243.802592] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.802786] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a824ccd8-295a-4560-890f-6fe63054f4fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.811904] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926475, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.812949] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1243.812949] env[69994]: value = "task-2926476" [ 1243.812949] env[69994]: _type = "Task" [ 1243.812949] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.820081] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926476, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.871600] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Releasing lock "refresh_cache-966e3672-f85b-467d-8821-1e14533ee629" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.871961] env[69994]: DEBUG nova.compute.manager [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Instance network_info: |[{"id": "372f3c43-b01a-4ba8-919b-804926d5fceb", "address": "fa:16:3e:6d:35:c9", "network": {"id": "47871821-916c-4397-b330-8bdda7ccd6f6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1622921692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "659795e8bd77484fa20f48d704d113a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9297313e-7c50-4873-93d3-67284929163a", "external-id": "nsx-vlan-transportzone-620", "segmentation_id": 620, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap372f3c43-b0", "ovs_interfaceid": "372f3c43-b01a-4ba8-919b-804926d5fceb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1243.872592] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:35:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9297313e-7c50-4873-93d3-67284929163a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '372f3c43-b01a-4ba8-919b-804926d5fceb', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1243.879989] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Creating folder: Project (659795e8bd77484fa20f48d704d113a9). Parent ref: group-v587342. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1243.880284] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6394ec3-8ba2-42b9-bc19-d3dd0b72269d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.891157] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Created folder: Project (659795e8bd77484fa20f48d704d113a9) in parent group-v587342. [ 1243.891423] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Creating folder: Instances. Parent ref: group-v587660. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1243.891688] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5273470-f901-490a-a16d-84e1461baef4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.900583] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Created folder: Instances in parent group-v587660. [ 1243.900822] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1243.901041] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1243.901295] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c50eace5-cbfb-4206-a14a-0db8c50cad47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.918993] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1243.918993] env[69994]: value = "task-2926479" [ 1243.918993] env[69994]: _type = "Task" [ 1243.918993] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.926711] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926479, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.950668] env[69994]: DEBUG nova.objects.instance [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lazy-loading 'flavor' on Instance uuid 7963eb9f-66a1-417b-928b-3b5cef7847be {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1243.977864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "94169894-f772-41c9-95a1-ddf622f2c9f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.978352] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.117073] env[69994]: DEBUG nova.compute.manager [req-dab13752-9ee8-46d1-9251-2ff3b789899f req-83816f95-0eac-4570-b287-8aec8b1ffa94 service nova] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Received event network-changed-372f3c43-b01a-4ba8-919b-804926d5fceb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1244.117318] env[69994]: DEBUG nova.compute.manager [req-dab13752-9ee8-46d1-9251-2ff3b789899f req-83816f95-0eac-4570-b287-8aec8b1ffa94 service nova] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Refreshing instance network info cache due to event network-changed-372f3c43-b01a-4ba8-919b-804926d5fceb. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1244.117606] env[69994]: DEBUG oslo_concurrency.lockutils [req-dab13752-9ee8-46d1-9251-2ff3b789899f req-83816f95-0eac-4570-b287-8aec8b1ffa94 service nova] Acquiring lock "refresh_cache-966e3672-f85b-467d-8821-1e14533ee629" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.117787] env[69994]: DEBUG oslo_concurrency.lockutils [req-dab13752-9ee8-46d1-9251-2ff3b789899f req-83816f95-0eac-4570-b287-8aec8b1ffa94 service nova] Acquired lock "refresh_cache-966e3672-f85b-467d-8821-1e14533ee629" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.118026] env[69994]: DEBUG nova.network.neutron [req-dab13752-9ee8-46d1-9251-2ff3b789899f req-83816f95-0eac-4570-b287-8aec8b1ffa94 service nova] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Refreshing network info cache for port 372f3c43-b01a-4ba8-919b-804926d5fceb {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1244.130805] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926474, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218277} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.131055] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1244.131277] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1244.131507] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1244.313617] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926475, 'name': ReconfigVM_Task, 'duration_secs': 0.291324} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.316607] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfigured VM instance instance-0000006e to attach disk [datastore2] a828caf9-2b61-4449-b1ee-25f0828380d1/a828caf9-2b61-4449-b1ee-25f0828380d1.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1244.317437] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0ea41a-d85d-47dc-8a12-cdc986af67ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.325009] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926476, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.341281] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503c5c3d-8c48-44ff-9d93-24e802a0cf32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.361880] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff29949-1802-4151-8f77-f15b36e1b7d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.382519] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21af7be-23f8-4181-a2bb-0a6d0cbe9e20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.389122] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1244.389411] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f05dd5d5-3f4a-436c-bf52-d64410f98e20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.395479] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1244.395479] env[69994]: value = "task-2926480" [ 1244.395479] env[69994]: _type = "Task" [ 1244.395479] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.402799] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926480, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.429485] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926479, 'name': CreateVM_Task, 'duration_secs': 0.31815} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.429731] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1244.430480] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.430666] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.431120] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1244.431361] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-023fd887-5176-4d79-b1ef-68444d5a130c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.435753] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1244.435753] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5220e431-152b-1baf-3625-444ae87176dd" [ 1244.435753] env[69994]: _type = "Task" [ 1244.435753] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.443740] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5220e431-152b-1baf-3625-444ae87176dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.481041] env[69994]: DEBUG nova.compute.manager [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1244.822525] env[69994]: DEBUG oslo_vmware.api [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926476, 'name': PowerOnVM_Task, 'duration_secs': 0.594958} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.822828] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1244.823031] env[69994]: INFO nova.compute.manager [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Took 8.09 seconds to spawn the instance on the hypervisor. [ 1244.823209] env[69994]: DEBUG nova.compute.manager [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1244.823967] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32499489-fd4d-46cb-bdb4-6e15305752e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.827836] env[69994]: DEBUG nova.network.neutron [req-dab13752-9ee8-46d1-9251-2ff3b789899f req-83816f95-0eac-4570-b287-8aec8b1ffa94 service nova] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Updated VIF entry in instance network info cache for port 372f3c43-b01a-4ba8-919b-804926d5fceb. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1244.828167] env[69994]: DEBUG nova.network.neutron [req-dab13752-9ee8-46d1-9251-2ff3b789899f req-83816f95-0eac-4570-b287-8aec8b1ffa94 service nova] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Updating instance_info_cache with network_info: [{"id": "372f3c43-b01a-4ba8-919b-804926d5fceb", "address": "fa:16:3e:6d:35:c9", "network": {"id": "47871821-916c-4397-b330-8bdda7ccd6f6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1622921692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "659795e8bd77484fa20f48d704d113a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9297313e-7c50-4873-93d3-67284929163a", "external-id": "nsx-vlan-transportzone-620", "segmentation_id": 620, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap372f3c43-b0", "ovs_interfaceid": "372f3c43-b01a-4ba8-919b-804926d5fceb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.906501] env[69994]: DEBUG oslo_vmware.api [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926480, 'name': PowerOnVM_Task, 'duration_secs': 0.392837} completed 
successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.906780] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1244.946400] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5220e431-152b-1baf-3625-444ae87176dd, 'name': SearchDatastore_Task, 'duration_secs': 0.010443} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.946704] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1244.946931] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1244.947176] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.947324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.947501] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1244.947753] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6f6035b-ebe0-4c85-a9d0-829ecdec4d11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.957685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-590cfcdf-b0bb-4a58-8f9c-8175a0ac962e tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be" "released" by 
"nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 2.766s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.960978] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1244.960978] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1244.961837] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b3c9fda-659d-47f4-a2b2-5a9b94edad40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.969248] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1244.969248] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522f4191-f597-5b52-1a08-a25a4debee9d" [ 1244.969248] env[69994]: _type = "Task" [ 1244.969248] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.974465] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522f4191-f597-5b52-1a08-a25a4debee9d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.001755] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.001842] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.004032] env[69994]: INFO nova.compute.claims [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1245.144311] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Volume attach. Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1245.144538] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587659', 'volume_id': 'c28f1025-5a78-457c-a46f-684978ccb6d9', 'name': 'volume-c28f1025-5a78-457c-a46f-684978ccb6d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e', 'attached_at': '', 'detached_at': '', 'volume_id': 'c28f1025-5a78-457c-a46f-684978ccb6d9', 'serial': 'c28f1025-5a78-457c-a46f-684978ccb6d9'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1245.145427] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a270bd0b-2655-4859-b279-89c8d248a13e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.164348] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee71da6-5e0e-4661-b640-6f2e9173dde6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.191145] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-c28f1025-5a78-457c-a46f-684978ccb6d9/volume-c28f1025-5a78-457c-a46f-684978ccb6d9.vmdk or device None with type thin {{(pid=69994) 
attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1245.193453] env[69994]: DEBUG nova.virt.hardware [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1245.193681] env[69994]: DEBUG nova.virt.hardware [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1245.193838] env[69994]: DEBUG nova.virt.hardware [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1245.194030] env[69994]: DEBUG nova.virt.hardware [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1245.194184] env[69994]: DEBUG nova.virt.hardware [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1245.194331] env[69994]: DEBUG nova.virt.hardware [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1245.194543] env[69994]: DEBUG nova.virt.hardware [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1245.194772] env[69994]: DEBUG nova.virt.hardware [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1245.194967] env[69994]: DEBUG nova.virt.hardware [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 
tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1245.195148] env[69994]: DEBUG nova.virt.hardware [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1245.195328] env[69994]: DEBUG nova.virt.hardware [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1245.195581] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-939bbd65-a41d-4f6a-abcc-a3876844d97c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.208692] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f325886-92a9-40de-b37d-2f9cf8bdc9ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.217589] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff9efb3-7736-4540-b434-e7ce047d8e68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.221293] env[69994]: DEBUG oslo_vmware.api [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1245.221293] env[69994]: value = "task-2926481" [ 1245.221293] env[69994]: _type = "Task" [ 1245.221293] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.232102] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1245.237780] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1245.238313] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1245.238528] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fadaa273-b63c-4e43-8864-6ea2c78d3fdb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.253326] env[69994]: DEBUG oslo_vmware.api [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926481, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.257913] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1245.257913] env[69994]: value = "task-2926482" [ 1245.257913] env[69994]: _type = "Task" [ 1245.257913] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.267501] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926482, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.334259] env[69994]: DEBUG oslo_concurrency.lockutils [req-dab13752-9ee8-46d1-9251-2ff3b789899f req-83816f95-0eac-4570-b287-8aec8b1ffa94 service nova] Releasing lock "refresh_cache-966e3672-f85b-467d-8821-1e14533ee629" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.341816] env[69994]: INFO nova.compute.manager [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Took 16.79 seconds to build instance. 
[ 1245.425494] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "7963eb9f-66a1-417b-928b-3b5cef7847be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.425761] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.425978] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "7963eb9f-66a1-417b-928b-3b5cef7847be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.426243] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.426426] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.428417] env[69994]: INFO nova.compute.manager [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Terminating instance [ 1245.477872] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522f4191-f597-5b52-1a08-a25a4debee9d, 'name': SearchDatastore_Task, 'duration_secs': 0.022692} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.478451] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-244c639e-8c6b-40cb-8a6d-29eb5de591a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.483849] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1245.483849] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52eae6a3-5be4-86a3-6b31-41380be16303" [ 1245.483849] env[69994]: _type = "Task" [ 1245.483849] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.492026] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52eae6a3-5be4-86a3-6b31-41380be16303, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.732630] env[69994]: DEBUG oslo_vmware.api [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926481, 'name': ReconfigVM_Task, 'duration_secs': 0.462687} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.732975] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-c28f1025-5a78-457c-a46f-684978ccb6d9/volume-c28f1025-5a78-457c-a46f-684978ccb6d9.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1245.738549] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af6cc51b-90fc-4f33-97b1-2fa8b8178ea4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.756437] env[69994]: DEBUG oslo_vmware.api [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1245.756437] env[69994]: value = "task-2926483" [ 1245.756437] env[69994]: _type = "Task" [ 1245.756437] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.767339] env[69994]: DEBUG oslo_vmware.api [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926483, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.770358] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926482, 'name': CreateVM_Task, 'duration_secs': 0.27313} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.770518] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1245.770898] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.771066] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1245.771412] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1245.772057] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-377da3be-0331-42c8-b4df-661b06da4e56 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.776307] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1245.776307] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5262b5d8-cd11-a89c-64bb-debb6d7f5983" [ 1245.776307] env[69994]: _type = "Task" [ 1245.776307] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.785541] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5262b5d8-cd11-a89c-64bb-debb6d7f5983, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.844657] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2dab4f6a-7afd-4ffd-b334-3525f3d8d7c0 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "5784a102-fd07-4717-a88b-ac94ad578af6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.301s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.933805] env[69994]: DEBUG nova.compute.manager [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1245.934158] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1245.935610] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a3fdae-86be-4b84-a879-0d044f644fa4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.943998] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1245.944265] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bca9897-a310-4ac6-aac0-11c5fe13fcf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.950483] env[69994]: DEBUG oslo_vmware.api [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1245.950483] env[69994]: value = "task-2926484" [ 1245.950483] env[69994]: _type = "Task" [ 1245.950483] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.955999] env[69994]: INFO nova.compute.manager [None req-d0b42d85-68d2-440b-a839-47645de7736c tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance to original state: 'active' [ 1245.965359] env[69994]: DEBUG oslo_vmware.api [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926484, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.993600] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52eae6a3-5be4-86a3-6b31-41380be16303, 'name': SearchDatastore_Task, 'duration_secs': 0.009095} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.993857] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.994129] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 966e3672-f85b-467d-8821-1e14533ee629/966e3672-f85b-467d-8821-1e14533ee629.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1245.994389] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0104c7e3-6563-49ba-a4dd-8a910253430c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.001488] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1246.001488] env[69994]: value = "task-2926485" [ 1246.001488] env[69994]: _type = "Task" [ 1246.001488] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.013040] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926485, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.141954] env[69994]: DEBUG nova.compute.manager [req-3bdaf205-4495-40d7-84d5-968c62cc944d req-6c68a477-3d0d-41ab-a7f4-f1be6622c052 service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Received event network-changed-5881a64a-b640-4414-b3cd-35a42d39632b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1246.142217] env[69994]: DEBUG nova.compute.manager [req-3bdaf205-4495-40d7-84d5-968c62cc944d req-6c68a477-3d0d-41ab-a7f4-f1be6622c052 service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Refreshing instance network info cache due to event network-changed-5881a64a-b640-4414-b3cd-35a42d39632b. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1246.142471] env[69994]: DEBUG oslo_concurrency.lockutils [req-3bdaf205-4495-40d7-84d5-968c62cc944d req-6c68a477-3d0d-41ab-a7f4-f1be6622c052 service nova] Acquiring lock "refresh_cache-5784a102-fd07-4717-a88b-ac94ad578af6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.142687] env[69994]: DEBUG oslo_concurrency.lockutils [req-3bdaf205-4495-40d7-84d5-968c62cc944d req-6c68a477-3d0d-41ab-a7f4-f1be6622c052 service nova] Acquired lock "refresh_cache-5784a102-fd07-4717-a88b-ac94ad578af6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1246.142896] env[69994]: DEBUG nova.network.neutron [req-3bdaf205-4495-40d7-84d5-968c62cc944d req-6c68a477-3d0d-41ab-a7f4-f1be6622c052 service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Refreshing network info cache for port 5881a64a-b640-4414-b3cd-35a42d39632b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1246.162947] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecae9479-827b-4c8d-bef8-afbedd111b70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.172192] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965c7316-b9ae-4f74-801d-f9719ff91e91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.203097] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a806ad66-c756-4eef-9b2b-07cea9c5d778 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.210242] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1266b73d-93a5-405e-a0bf-c90f39efbe7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.225876] env[69994]: DEBUG nova.compute.provider_tree [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1246.267842] env[69994]: DEBUG oslo_vmware.api [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926483, 'name': ReconfigVM_Task, 'duration_secs': 0.142995} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.268144] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587659', 'volume_id': 'c28f1025-5a78-457c-a46f-684978ccb6d9', 'name': 'volume-c28f1025-5a78-457c-a46f-684978ccb6d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e', 'attached_at': '', 'detached_at': '', 'volume_id': 'c28f1025-5a78-457c-a46f-684978ccb6d9', 'serial': 'c28f1025-5a78-457c-a46f-684978ccb6d9'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1246.285764] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5262b5d8-cd11-a89c-64bb-debb6d7f5983, 'name': SearchDatastore_Task, 'duration_secs': 0.012697} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.286060] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1246.286288] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1246.286520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.286662] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1246.286836] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1246.287101] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
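Annotation: the "Attached VMDK" line above dumps the full connection_info dict for the Cinder volume. The sketch below pulls out the fields a caller typically needs; the dict literal is abridged from that log line, the helper is illustrative rather than Nova code, and the reading of 'volume' as the volume's backing object reference in vCenter is an assumption.

connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {'volume': 'vm-587659',
             'volume_id': 'c28f1025-5a78-457c-a46f-684978ccb6d9',
             'name': 'volume-c28f1025-5a78-457c-a46f-684978ccb6d9',
             'access_mode': 'rw', 'encrypted': False},
    'serial': 'c28f1025-5a78-457c-a46f-684978ccb6d9',
}

def vmdk_volume_fields(info):
    data = info['data']
    return {'backing_ref': data['volume'],        # assumed: backing object in vCenter
            'volume_id': data['volume_id'],
            'writable': data['access_mode'] == 'rw'}

assert vmdk_volume_fields(connection_info)['volume_id'] == connection_info['serial']
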
opID=oslo.vmware-5ef94fbd-7b6e-47ca-a2f2-091b99775213 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.295634] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1246.295818] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1246.296528] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98269a52-6928-41c7-abb1-75652adecd52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.301869] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1246.301869] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b74076-219d-047a-1123-89dcebfa0df4" [ 1246.301869] env[69994]: _type = "Task" [ 1246.301869] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.309441] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b74076-219d-047a-1123-89dcebfa0df4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.463045] env[69994]: DEBUG oslo_vmware.api [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926484, 'name': PowerOffVM_Task, 'duration_secs': 0.186962} completed successfully. 
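Annotation: the "Creating directory ... devstack-image-cache_base" / "Folder ... created" / SearchDatastore_Task sequence above is the image-cache check before a fetch: the cache folder creation is idempotent, and the datastore search decides whether the image vmdk already exists or must be downloaded. A local-filesystem sketch of that flow, with pathlib standing in for the datastore calls; names are illustrative only.

from pathlib import Path

def image_cached(cache_root, image_id):
    cache_dir = Path(cache_root) / 'devstack-image-cache_base'
    cache_dir.mkdir(parents=True, exist_ok=True)          # "Created directory ..."
    cached_vmdk = cache_dir / image_id / f'{image_id}.vmdk'
    return cached_vmdk.exists()                           # SearchDatastore_Task analogue

print(image_cached('/tmp/datastore1', 'f75f967d-5bd8-4c15-9a52-96f7e9dd9d48'))
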
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.465487] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1246.465664] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1246.466138] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c263903d-04f3-4e01-a102-4a331d5b5684 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.512083] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926485, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.729262] env[69994]: DEBUG nova.scheduler.client.report [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1246.814561] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b74076-219d-047a-1123-89dcebfa0df4, 'name': SearchDatastore_Task, 'duration_secs': 0.010672} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.815398] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e83b0197-ac2c-48de-bb1f-9fe3aa967aad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.821373] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1246.821373] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5225c723-ceb2-4c5c-63a9-8ce51b581133" [ 1246.821373] env[69994]: _type = "Task" [ 1246.821373] env[69994]: } to complete. 
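Annotation: the "Inventory has not changed" report above carries the numbers Placement uses for scheduling. Roughly, the schedulable capacity per resource class is (total - reserved) * allocation_ratio; the sketch below applies that formula to the exact figures from the log line.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def schedulable(inv):
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}

print(schedulable(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
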
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.832686] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5225c723-ceb2-4c5c-63a9-8ce51b581133, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.916267] env[69994]: DEBUG nova.network.neutron [req-3bdaf205-4495-40d7-84d5-968c62cc944d req-6c68a477-3d0d-41ab-a7f4-f1be6622c052 service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Updated VIF entry in instance network info cache for port 5881a64a-b640-4414-b3cd-35a42d39632b. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1246.916578] env[69994]: DEBUG nova.network.neutron [req-3bdaf205-4495-40d7-84d5-968c62cc944d req-6c68a477-3d0d-41ab-a7f4-f1be6622c052 service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Updating instance_info_cache with network_info: [{"id": "5881a64a-b640-4414-b3cd-35a42d39632b", "address": "fa:16:3e:f6:ce:27", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5881a64a-b6", "ovs_interfaceid": "5881a64a-b640-4414-b3cd-35a42d39632b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.012039] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926485, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.234925] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.235516] env[69994]: DEBUG nova.compute.manager [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Start building networks asynchronously for instance. 
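Annotation: the instance_info_cache update above stores the full VIF description for port 5881a64a-b640-4414-b3cd-35a42d39632b. The sketch below flattens such an entry into the few fields usually needed when reading these logs; the literal is abridged from that line and the helper is illustrative.

vif = {
    "id": "5881a64a-b640-4414-b3cd-35a42d39632b",
    "address": "fa:16:3e:f6:ce:27",
    "type": "ovs",
    "devname": "tap5881a64a-b6",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.14", "type": "fixed"}],
        }],
    },
    "active": True,
}

def summarize_vif(vif):
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    return {"port": vif["id"], "mac": vif["address"],
            "dev": vif["devname"], "ips": ips, "active": vif["active"]}

print(summarize_vif(vif))
# {'port': '5881a64a-...', 'mac': 'fa:16:3e:f6:ce:27', 'dev': 'tap5881a64a-b6',
#  'ips': ['192.168.128.14'], 'active': True}
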
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1247.309520] env[69994]: DEBUG nova.objects.instance [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'flavor' on Instance uuid 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1247.333537] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5225c723-ceb2-4c5c-63a9-8ce51b581133, 'name': SearchDatastore_Task, 'duration_secs': 0.01941} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.333876] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1247.334155] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 2ae41965-d345-4358-92bc-7e43d81aca50/2ae41965-d345-4358-92bc-7e43d81aca50.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1247.334384] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97dfa9d9-af14-4b5e-980c-1eba966fcb8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.341735] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1247.341735] env[69994]: value = "task-2926488" [ 1247.341735] env[69994]: _type = "Task" [ 1247.341735] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.351112] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926488, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.419478] env[69994]: DEBUG oslo_concurrency.lockutils [req-3bdaf205-4495-40d7-84d5-968c62cc944d req-6c68a477-3d0d-41ab-a7f4-f1be6622c052 service nova] Releasing lock "refresh_cache-5784a102-fd07-4717-a88b-ac94ad578af6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1247.512079] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926485, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.402092} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.512398] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 966e3672-f85b-467d-8821-1e14533ee629/966e3672-f85b-467d-8821-1e14533ee629.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1247.512628] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1247.512871] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c330b45-cf0c-41d9-8c9d-2fec2735e254 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.519892] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1247.519892] env[69994]: value = "task-2926489" [ 1247.519892] env[69994]: _type = "Task" [ 1247.519892] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.528096] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926489, 'name': ExtendVirtualDisk_Task} progress is 0%. 
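Annotation: the "Extending root virtual disk to 1048576" figure above is the flavor's root disk size converted to KiB: the m1.nano flavor used by these tempest instances (root_gb=1, visible in the flavor dump later in this excerpt) gives 1 * 1024 * 1024 = 1048576. A one-line sketch of that conversion; the ExtendVirtualDisk_Task call itself is omitted.

def root_disk_kb(root_gb):
    return root_gb * 1024 * 1024    # GiB -> KiB

assert root_disk_kb(1) == 1048576
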
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.553224] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1247.553478] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1247.553740] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Deleting the datastore file [datastore1] 7963eb9f-66a1-417b-928b-3b5cef7847be {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1247.554130] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5a772b5-1f9a-4f87-a723-9e975266f0ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.560136] env[69994]: DEBUG oslo_vmware.api [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for the task: (returnval){ [ 1247.560136] env[69994]: value = "task-2926490" [ 1247.560136] env[69994]: _type = "Task" [ 1247.560136] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.568577] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "a828caf9-2b61-4449-b1ee-25f0828380d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.568985] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1247.569240] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "a828caf9-2b61-4449-b1ee-25f0828380d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.569486] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1247.569690] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.571688] env[69994]: INFO nova.compute.manager [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Terminating instance [ 1247.574310] env[69994]: DEBUG oslo_vmware.api [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926490, 'name': DeleteDatastoreFile_Task} progress is 0%. 
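Annotation: the terminate sequence above takes two locks in a fixed order: first the per-instance lock that serializes lifecycle operations on a828caf9-2b61-4449-b1ee-25f0828380d1, then briefly the "<uuid>-events" lock while queued external events are cleared, after which the actual teardown runs. A simplified sketch of that ordering with plain threading locks; real Nova uses oslo.concurrency named locks and passes the callables differently.

import threading

instance_lock = threading.Lock()   # lock "a828caf9-2b61-4449-b1ee-25f0828380d1"
events_lock = threading.Lock()     # lock "a828caf9-2b61-4449-b1ee-25f0828380d1-events"

def do_terminate_instance(clear_events, shutdown):
    with instance_lock:            # serializes all lifecycle ops on this instance
        with events_lock:          # held only long enough to drop queued events
            clear_events()
        shutdown()                 # power off, detach volumes, destroy (as in the log)
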
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.742037] env[69994]: DEBUG nova.compute.utils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1247.744469] env[69994]: DEBUG nova.compute.manager [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1247.745384] env[69994]: DEBUG nova.network.neutron [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1247.798174] env[69994]: DEBUG nova.policy [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ce185b7affb46fd898b46f6db1224f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a10b55bcc104c108604d402ec6d09ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1247.815229] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c4119db3-a7e4-4736-b57a-e449218b0ae2 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.280s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.852034] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926488, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.031814] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926489, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.287819} completed successfully. 
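Annotation: the "Using /dev/sd instead of None" line above is the device-name fallback: when the attach request does not specify a device, the compute manager falls back to the /dev/sd prefix and picks the next unused letter. A simplified sketch of that selection; Nova's real get_next_device_name also considers block device mappings and multi-letter suffixes.

import string

def next_device_name(existing, prefix='/dev/sd'):
    used = {name[len(prefix):] for name in existing if name.startswith(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in used:
            return prefix + letter
    raise ValueError('no free device names under ' + prefix)

print(next_device_name(['/dev/sda', '/dev/sdb']))   # /dev/sdc
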
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.031814] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1248.031814] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2683480d-64b7-4501-bb24-91dd3bd8586f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.054081] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 966e3672-f85b-467d-8821-1e14533ee629/966e3672-f85b-467d-8821-1e14533ee629.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1248.054429] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99de978c-6c93-4684-b1ed-2f6d08e21601 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.080328] env[69994]: DEBUG nova.compute.manager [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1248.080494] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1248.080715] env[69994]: DEBUG oslo_vmware.api [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926490, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.082167] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a1e7f00-3abb-4a6a-b924-56ab94129c74 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.083819] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1248.083819] env[69994]: value = "task-2926491" [ 1248.083819] env[69994]: _type = "Task" [ 1248.083819] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.088460] env[69994]: DEBUG oslo_vmware.api [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1248.088460] env[69994]: value = "task-2926492" [ 1248.088460] env[69994]: _type = "Task" [ 1248.088460] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.097157] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926491, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.104471] env[69994]: DEBUG oslo_vmware.api [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926492, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.116637] env[69994]: DEBUG nova.network.neutron [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Successfully created port: 6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1248.248916] env[69994]: DEBUG nova.compute.manager [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1248.353269] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926488, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.740018} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.353657] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 2ae41965-d345-4358-92bc-7e43d81aca50/2ae41965-d345-4358-92bc-7e43d81aca50.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1248.353778] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1248.353991] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3f4c232-6858-4c96-b5be-4bba33d3be32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.360047] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1248.360047] env[69994]: value = "task-2926493" [ 1248.360047] env[69994]: _type = "Task" [ 1248.360047] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.367851] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926493, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.578371] env[69994]: DEBUG oslo_vmware.api [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Task: {'id': task-2926490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.719214} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.578700] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1248.578948] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1248.579239] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1248.579437] env[69994]: INFO nova.compute.manager [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Took 2.65 seconds to destroy the instance on the hypervisor. [ 1248.579681] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1248.579891] env[69994]: DEBUG nova.compute.manager [-] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1248.579992] env[69994]: DEBUG nova.network.neutron [-] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1248.594923] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926491, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.600030] env[69994]: DEBUG oslo_vmware.api [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926492, 'name': PowerOffVM_Task, 'duration_secs': 0.215379} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.600030] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1248.600172] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1248.600431] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587652', 'volume_id': '722872ee-b34e-4f98-a7cb-04d35102032b', 'name': 'volume-722872ee-b34e-4f98-a7cb-04d35102032b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'a828caf9-2b61-4449-b1ee-25f0828380d1', 'attached_at': '2025-03-11T12:41:02.000000', 'detached_at': '', 'volume_id': '722872ee-b34e-4f98-a7cb-04d35102032b', 'serial': '722872ee-b34e-4f98-a7cb-04d35102032b'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1248.601160] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55201cd1-cc01-4e2c-88d2-e210abe3002e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.624219] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206a5ca4-8708-484c-8388-fc9e4d7d7074 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.632143] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3c4ec5-df9f-4ba5-aef3-5654bb82c733 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.651431] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18db1313-92de-4acc-b663-0f54df6ade7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.668611] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] The volume has not been displaced from its original location: [datastore2] volume-722872ee-b34e-4f98-a7cb-04d35102032b/volume-722872ee-b34e-4f98-a7cb-04d35102032b.vmdk. No consolidation needed. 
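Annotation: the "has not been displaced from its original location ... No consolidation needed" line above is a pre-detach check: if the vmdk currently backing the instance's disk no longer sits at the volume's original path, the driver would first have to consolidate (move the data back) before detaching. A toy version of that comparison using the path from the log; the real check works on vCenter backing objects, not strings.

ORIGINAL = ('[datastore2] volume-722872ee-b34e-4f98-a7cb-04d35102032b/'
            'volume-722872ee-b34e-4f98-a7cb-04d35102032b.vmdk')
CURRENT = ORIGINAL   # in this run the disk was never moved

def needs_consolidation(current_backing, original_backing):
    return current_backing != original_backing

assert not needs_consolidation(CURRENT, ORIGINAL)   # matches "No consolidation needed"
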
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1248.674008] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfiguring VM instance instance-0000006e to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1248.674369] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37a11d97-2ae8-4d99-9e68-7a32d50e646c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.693490] env[69994]: DEBUG oslo_vmware.api [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1248.693490] env[69994]: value = "task-2926494" [ 1248.693490] env[69994]: _type = "Task" [ 1248.693490] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.700937] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1248.701246] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1248.707917] env[69994]: DEBUG oslo_vmware.api [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926494, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.869847] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926493, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065972} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.870181] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1248.870962] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d36391-2527-4d21-a620-9fd46cff8ad0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.890103] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 2ae41965-d345-4358-92bc-7e43d81aca50/2ae41965-d345-4358-92bc-7e43d81aca50.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1248.890412] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56baf3ed-f951-452f-819f-ef9ca3702016 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.908969] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1248.908969] env[69994]: value = "task-2926495" [ 1248.908969] env[69994]: _type = "Task" [ 1248.908969] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.916442] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926495, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.096204] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926491, 'name': ReconfigVM_Task, 'duration_secs': 0.734699} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.096358] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 966e3672-f85b-467d-8821-1e14533ee629/966e3672-f85b-467d-8821-1e14533ee629.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1249.096975] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2703af7-fbdb-4ef3-8bed-5518a1348cab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.104019] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1249.104019] env[69994]: value = "task-2926496" [ 1249.104019] env[69994]: _type = "Task" [ 1249.104019] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.109839] env[69994]: DEBUG nova.compute.manager [req-06403a7c-f60f-4fa8-ad59-27b816308ebb req-e96b57d4-5fc9-4a8e-92b2-f5210118ac14 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Received event network-vif-deleted-795b02e6-feba-409a-ad9f-5932d55da938 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1249.109839] env[69994]: INFO nova.compute.manager [req-06403a7c-f60f-4fa8-ad59-27b816308ebb req-e96b57d4-5fc9-4a8e-92b2-f5210118ac14 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Neutron deleted interface 795b02e6-feba-409a-ad9f-5932d55da938; detaching it from the instance and deleting it from the info cache [ 1249.109839] env[69994]: DEBUG nova.network.neutron [req-06403a7c-f60f-4fa8-ad59-27b816308ebb req-e96b57d4-5fc9-4a8e-92b2-f5210118ac14 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.116057] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926496, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.205092] env[69994]: DEBUG oslo_vmware.api [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926494, 'name': ReconfigVM_Task, 'duration_secs': 0.19943} completed successfully. 
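Annotation: when Neutron reports that port 795b02e6-feba-409a-ad9f-5932d55da938 was deleted, the event handler above drops that VIF from the cached network_info, which is why the same record then updates instance_info_cache to an empty list. A minimal sketch of that filter; the cached entry is reduced to its id for brevity.

def drop_deleted_port(network_info, deleted_port_id):
    return [vif for vif in network_info if vif['id'] != deleted_port_id]

cached = [{'id': '795b02e6-feba-409a-ad9f-5932d55da938'}]
print(drop_deleted_port(cached, '795b02e6-feba-409a-ad9f-5932d55da938'))   # []
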
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.205092] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Reconfigured VM instance instance-0000006e to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1249.210586] env[69994]: DEBUG nova.compute.utils [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1249.212069] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c692bcb-2225-4ef8-9e55-63170c8fcfbd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.233401] env[69994]: DEBUG oslo_vmware.api [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1249.233401] env[69994]: value = "task-2926497" [ 1249.233401] env[69994]: _type = "Task" [ 1249.233401] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.249600] env[69994]: DEBUG oslo_vmware.api [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926497, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.264121] env[69994]: DEBUG nova.compute.manager [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1249.297930] env[69994]: DEBUG nova.virt.hardware [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1249.298152] env[69994]: DEBUG nova.virt.hardware [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1249.298370] env[69994]: DEBUG nova.virt.hardware [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1249.298644] env[69994]: DEBUG nova.virt.hardware [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1249.298869] env[69994]: DEBUG nova.virt.hardware [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1249.299127] env[69994]: DEBUG nova.virt.hardware [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1249.299552] env[69994]: DEBUG nova.virt.hardware [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1249.299808] env[69994]: DEBUG nova.virt.hardware [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1249.300091] env[69994]: DEBUG nova.virt.hardware [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1249.300328] env[69994]: DEBUG nova.virt.hardware [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1249.300589] env[69994]: DEBUG nova.virt.hardware [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1249.301714] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febc14e5-4502-41b2-ac1b-03678dc5fd77 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.313744] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c874d0-8d55-442d-9193-9ae01aaab18b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.418817] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926495, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.522611] env[69994]: DEBUG nova.compute.manager [req-99f495af-69fb-4f06-9111-6fc0329cad1e req-0fb07c4f-3121-4245-9c21-e6a33db964ee service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Received event network-vif-plugged-6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1249.522822] env[69994]: DEBUG oslo_concurrency.lockutils [req-99f495af-69fb-4f06-9111-6fc0329cad1e req-0fb07c4f-3121-4245-9c21-e6a33db964ee service nova] Acquiring lock "94169894-f772-41c9-95a1-ddf622f2c9f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.523068] env[69994]: DEBUG oslo_concurrency.lockutils [req-99f495af-69fb-4f06-9111-6fc0329cad1e req-0fb07c4f-3121-4245-9c21-e6a33db964ee service nova] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.523278] env[69994]: DEBUG oslo_concurrency.lockutils [req-99f495af-69fb-4f06-9111-6fc0329cad1e req-0fb07c4f-3121-4245-9c21-e6a33db964ee service nova] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.523458] env[69994]: DEBUG nova.compute.manager [req-99f495af-69fb-4f06-9111-6fc0329cad1e req-0fb07c4f-3121-4245-9c21-e6a33db964ee service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] No waiting events found dispatching network-vif-plugged-6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1249.523627] env[69994]: WARNING nova.compute.manager [req-99f495af-69fb-4f06-9111-6fc0329cad1e req-0fb07c4f-3121-4245-9c21-e6a33db964ee service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Received unexpected event network-vif-plugged-6645c1c7-f316-403a-98aa-8b2cca92f8e4 for instance with vm_state building and task_state spawning. 
[ 1249.578829] env[69994]: DEBUG nova.network.neutron [-] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.604505] env[69994]: DEBUG nova.network.neutron [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Successfully updated port: 6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1249.612878] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a0ecda9-736f-457f-b338-56d76e8e32d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.621044] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926496, 'name': Rename_Task, 'duration_secs': 0.14928} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.622263] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1249.622589] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4a4ad9b-5b8f-4a22-b1f4-c8e61555adf3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.626960] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d998ba20-c77a-44af-91e0-5a1a8827636a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.644624] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1249.644624] env[69994]: value = "task-2926498" [ 1249.644624] env[69994]: _type = "Task" [ 1249.644624] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.659933] env[69994]: DEBUG nova.compute.manager [req-06403a7c-f60f-4fa8-ad59-27b816308ebb req-e96b57d4-5fc9-4a8e-92b2-f5210118ac14 service nova] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Detach interface failed, port_id=795b02e6-feba-409a-ad9f-5932d55da938, reason: Instance 7963eb9f-66a1-417b-928b-3b5cef7847be could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1249.663834] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926498, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.728115] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.026s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.750432] env[69994]: DEBUG oslo_vmware.api [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926497, 'name': ReconfigVM_Task, 'duration_secs': 0.146333} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.750897] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587652', 'volume_id': '722872ee-b34e-4f98-a7cb-04d35102032b', 'name': 'volume-722872ee-b34e-4f98-a7cb-04d35102032b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'a828caf9-2b61-4449-b1ee-25f0828380d1', 'attached_at': '2025-03-11T12:41:02.000000', 'detached_at': '', 'volume_id': '722872ee-b34e-4f98-a7cb-04d35102032b', 'serial': '722872ee-b34e-4f98-a7cb-04d35102032b'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1249.751348] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1249.752611] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb416291-231c-4117-8de8-9810d18e1451 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.765977] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1249.766278] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72e82244-cc5e-44d2-a559-ade357bf99b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.843016] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1249.843288] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1249.843462] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleting the datastore file [datastore2] a828caf9-2b61-4449-b1ee-25f0828380d1 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1249.843779] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ecdd696f-79aa-44bf-b4fa-dd329747d277 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.850830] env[69994]: DEBUG oslo_vmware.api [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1249.850830] env[69994]: value = "task-2926500" [ 1249.850830] env[69994]: _type = "Task" [ 1249.850830] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.858677] env[69994]: DEBUG oslo_vmware.api [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926500, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.918738] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926495, 'name': ReconfigVM_Task, 'duration_secs': 0.867577} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.919059] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 2ae41965-d345-4358-92bc-7e43d81aca50/2ae41965-d345-4358-92bc-7e43d81aca50.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1249.919763] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f77a0a4f-0303-4002-8b47-4900e79bbbb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.925740] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1249.925740] env[69994]: value = "task-2926501" [ 1249.925740] env[69994]: _type = "Task" [ 1249.925740] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.935275] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926501, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.081895] env[69994]: INFO nova.compute.manager [-] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Took 1.50 seconds to deallocate network for instance. [ 1250.107123] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.107280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.107442] env[69994]: DEBUG nova.network.neutron [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1250.154035] env[69994]: DEBUG oslo_vmware.api [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926498, 'name': PowerOnVM_Task, 'duration_secs': 0.478056} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.154285] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1250.154480] env[69994]: INFO nova.compute.manager [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Took 8.36 seconds to spawn the instance on the hypervisor. 
[ 1250.154658] env[69994]: DEBUG nova.compute.manager [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1250.155404] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b36c36-0e4e-48db-af81-e38edc3f12e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.360384] env[69994]: DEBUG oslo_vmware.api [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926500, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.463134} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.360665] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1250.360794] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1250.360977] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1250.361156] env[69994]: INFO nova.compute.manager [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Took 2.28 seconds to destroy the instance on the hypervisor. [ 1250.361433] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1250.361649] env[69994]: DEBUG nova.compute.manager [-] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1250.361746] env[69994]: DEBUG nova.network.neutron [-] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1250.435898] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926501, 'name': Rename_Task, 'duration_secs': 0.279522} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.436218] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1250.436492] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea8f25ad-498f-436c-924f-0c42b8d80958 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.444638] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1250.444638] env[69994]: value = "task-2926503" [ 1250.444638] env[69994]: _type = "Task" [ 1250.444638] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.453173] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926503, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.589263] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1250.589618] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1250.589906] env[69994]: DEBUG nova.objects.instance [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lazy-loading 'resources' on Instance uuid 7963eb9f-66a1-417b-928b-3b5cef7847be {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1250.639268] env[69994]: DEBUG nova.network.neutron [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1250.675033] env[69994]: INFO nova.compute.manager [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Took 13.91 seconds to build instance. 
[ 1250.769303] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1250.769919] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1250.769919] env[69994]: INFO nova.compute.manager [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Attaching volume f9bcf24a-fc94-4806-827d-76ea61241fca to /dev/sdc [ 1250.811277] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab9c77c-e6a8-422b-9b80-d8ba567fdce1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.817996] env[69994]: DEBUG nova.network.neutron [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Updating instance_info_cache with network_info: [{"id": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "address": "fa:16:3e:98:5a:72", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6645c1c7-f3", "ovs_interfaceid": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.823014] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9f677b-4d1f-49a4-9770-ffb28efb7270 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.836174] env[69994]: DEBUG nova.virt.block_device [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 
1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Updating existing volume attachment record: d64db672-dcc6-4ac6-9285-328a7cc8da28 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1250.955146] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926503, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.967032] env[69994]: INFO nova.compute.manager [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Rescuing [ 1250.967032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "refresh_cache-966e3672-f85b-467d-8821-1e14533ee629" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.967032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquired lock "refresh_cache-966e3672-f85b-467d-8821-1e14533ee629" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.967183] env[69994]: DEBUG nova.network.neutron [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1251.177680] env[69994]: DEBUG oslo_concurrency.lockutils [None req-889bfa1d-66f3-4889-aee3-ad1bab217e9f tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "966e3672-f85b-467d-8821-1e14533ee629" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.424s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.229784] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c0cf43-532b-4e7d-b26f-5ca724382c8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.239640] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdbdd26-769c-4477-b7e0-568ed643d0a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.270064] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278484cb-4202-4135-9094-3f431e8c8a88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.277704] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38e3bec-71c3-4126-a1af-c93b6c9c6f4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.291101] env[69994]: DEBUG 
nova.compute.provider_tree [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1251.320998] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.321384] env[69994]: DEBUG nova.compute.manager [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Instance network_info: |[{"id": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "address": "fa:16:3e:98:5a:72", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6645c1c7-f3", "ovs_interfaceid": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1251.321801] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:5a:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52c1f5eb-3d4a-4faa-a30d-2b0a46430791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6645c1c7-f316-403a-98aa-8b2cca92f8e4', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1251.329542] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1251.329756] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1251.329973] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa2881cc-a8f2-4fc8-9f64-697040c698c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.350044] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1251.350044] env[69994]: value = "task-2926505" [ 1251.350044] env[69994]: _type = "Task" [ 1251.350044] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.358263] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926505, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.370093] env[69994]: DEBUG nova.network.neutron [-] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.457867] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926503, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.559434] env[69994]: DEBUG nova.compute.manager [req-ab2f27a6-edfc-4394-9cf3-258e38ae7ce3 req-f9fe2569-f30b-463e-bede-a34be040c1f7 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Received event network-changed-6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1251.559760] env[69994]: DEBUG nova.compute.manager [req-ab2f27a6-edfc-4394-9cf3-258e38ae7ce3 req-f9fe2569-f30b-463e-bede-a34be040c1f7 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Refreshing instance network info cache due to event network-changed-6645c1c7-f316-403a-98aa-8b2cca92f8e4. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1251.560119] env[69994]: DEBUG oslo_concurrency.lockutils [req-ab2f27a6-edfc-4394-9cf3-258e38ae7ce3 req-f9fe2569-f30b-463e-bede-a34be040c1f7 service nova] Acquiring lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.560299] env[69994]: DEBUG oslo_concurrency.lockutils [req-ab2f27a6-edfc-4394-9cf3-258e38ae7ce3 req-f9fe2569-f30b-463e-bede-a34be040c1f7 service nova] Acquired lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.560533] env[69994]: DEBUG nova.network.neutron [req-ab2f27a6-edfc-4394-9cf3-258e38ae7ce3 req-f9fe2569-f30b-463e-bede-a34be040c1f7 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Refreshing network info cache for port 6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1251.706988] env[69994]: DEBUG nova.network.neutron [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Updating instance_info_cache with network_info: [{"id": "372f3c43-b01a-4ba8-919b-804926d5fceb", "address": "fa:16:3e:6d:35:c9", "network": {"id": "47871821-916c-4397-b330-8bdda7ccd6f6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1622921692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "659795e8bd77484fa20f48d704d113a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9297313e-7c50-4873-93d3-67284929163a", "external-id": "nsx-vlan-transportzone-620", "segmentation_id": 620, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap372f3c43-b0", "ovs_interfaceid": "372f3c43-b01a-4ba8-919b-804926d5fceb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.796023] env[69994]: DEBUG nova.scheduler.client.report [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1251.860816] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926505, 'name': CreateVM_Task, 'duration_secs': 0.419001} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.860994] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1251.861727] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.861890] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.862234] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1251.862581] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1cdf933-7bfa-4073-8dfd-2ba863d57c2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.867192] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1251.867192] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523ad85b-1985-d6ee-ddde-4a8832d30c39" [ 1251.867192] env[69994]: _type = "Task" [ 1251.867192] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.871848] env[69994]: INFO nova.compute.manager [-] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Took 1.51 seconds to deallocate network for instance. [ 1251.877488] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523ad85b-1985-d6ee-ddde-4a8832d30c39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.955647] env[69994]: DEBUG oslo_vmware.api [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926503, 'name': PowerOnVM_Task, 'duration_secs': 1.273202} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.956026] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1251.956306] env[69994]: DEBUG nova.compute.manager [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1251.957154] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7a88cc-1883-413d-9249-a911b9d03ddb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.210489] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Releasing lock "refresh_cache-966e3672-f85b-467d-8821-1e14533ee629" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.283453] env[69994]: DEBUG nova.network.neutron [req-ab2f27a6-edfc-4394-9cf3-258e38ae7ce3 req-f9fe2569-f30b-463e-bede-a34be040c1f7 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Updated VIF entry in instance network info cache for port 6645c1c7-f316-403a-98aa-8b2cca92f8e4. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1252.283839] env[69994]: DEBUG nova.network.neutron [req-ab2f27a6-edfc-4394-9cf3-258e38ae7ce3 req-f9fe2569-f30b-463e-bede-a34be040c1f7 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Updating instance_info_cache with network_info: [{"id": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "address": "fa:16:3e:98:5a:72", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6645c1c7-f3", "ovs_interfaceid": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.301561] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 
tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.712s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.326092] env[69994]: INFO nova.scheduler.client.report [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Deleted allocations for instance 7963eb9f-66a1-417b-928b-3b5cef7847be [ 1252.379747] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523ad85b-1985-d6ee-ddde-4a8832d30c39, 'name': SearchDatastore_Task, 'duration_secs': 0.011107} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.380098] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.380316] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1252.380619] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.380831] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1252.381114] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1252.381456] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a848da00-025c-408f-ade9-8885135a2d7e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.392861] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 
tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1252.393067] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1252.393829] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1868576d-a026-4320-9df5-def6da5dbfe5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.400798] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1252.400798] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d342bf-c0c0-f711-344e-992436731d7d" [ 1252.400798] env[69994]: _type = "Task" [ 1252.400798] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.409924] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d342bf-c0c0-f711-344e-992436731d7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.413849] env[69994]: INFO nova.compute.manager [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Took 0.54 seconds to detach 1 volumes for instance. 
[ 1252.477357] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.477357] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.477357] env[69994]: DEBUG nova.objects.instance [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1252.786378] env[69994]: DEBUG oslo_concurrency.lockutils [req-ab2f27a6-edfc-4394-9cf3-258e38ae7ce3 req-f9fe2569-f30b-463e-bede-a34be040c1f7 service nova] Releasing lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.786575] env[69994]: DEBUG nova.compute.manager [req-ab2f27a6-edfc-4394-9cf3-258e38ae7ce3 req-f9fe2569-f30b-463e-bede-a34be040c1f7 service nova] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Received event network-vif-deleted-b02b823a-ee9f-4e7c-a397-79b6f4dcf8bf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1252.834822] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f59395cd-31d0-4b39-ad46-246f6625fa32 tempest-AttachVolumeNegativeTest-1428794158 tempest-AttachVolumeNegativeTest-1428794158-project-member] Lock "7963eb9f-66a1-417b-928b-3b5cef7847be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.409s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.913149] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d342bf-c0c0-f711-344e-992436731d7d, 'name': SearchDatastore_Task, 'duration_secs': 0.016286} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.914014] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e2b833a-c890-44f2-88d5-88ca2d306077 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.919950] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.922194] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1252.922194] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5299c840-5bad-7400-6985-48c8d151a7eb" [ 1252.922194] env[69994]: _type = "Task" [ 1252.922194] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.931989] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5299c840-5bad-7400-6985-48c8d151a7eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.098743] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "2ae41965-d345-4358-92bc-7e43d81aca50" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.099036] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "2ae41965-d345-4358-92bc-7e43d81aca50" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.099286] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "2ae41965-d345-4358-92bc-7e43d81aca50-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.099553] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "2ae41965-d345-4358-92bc-7e43d81aca50-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.099769] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "2ae41965-d345-4358-92bc-7e43d81aca50-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.102072] env[69994]: INFO nova.compute.manager [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Terminating instance [ 1253.433496] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5299c840-5bad-7400-6985-48c8d151a7eb, 'name': SearchDatastore_Task, 'duration_secs': 0.015154} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.433769] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.434050] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 94169894-f772-41c9-95a1-ddf622f2c9f6/94169894-f772-41c9-95a1-ddf622f2c9f6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1253.434312] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a72ffedc-f633-4d0d-b59a-f34432cd2f19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.441095] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1253.441095] env[69994]: value = "task-2926509" [ 1253.441095] env[69994]: _type = "Task" [ 1253.441095] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.449432] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926509, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.486778] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b429f6c9-b328-4ce9-9f6f-8da6a39d2581 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.488008] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.568s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.488605] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.510606] env[69994]: INFO nova.scheduler.client.report [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleted allocations for instance a828caf9-2b61-4449-b1ee-25f0828380d1 [ 1253.605379] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "refresh_cache-2ae41965-d345-4358-92bc-7e43d81aca50" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.605583] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquired lock "refresh_cache-2ae41965-d345-4358-92bc-7e43d81aca50" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.605738] env[69994]: DEBUG nova.network.neutron [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1253.752030] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1253.752131] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ecd0d90f-6600-434d-9acc-f21c197e8f8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.760468] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 
tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1253.760468] env[69994]: value = "task-2926510" [ 1253.760468] env[69994]: _type = "Task" [ 1253.760468] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.769595] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926510, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.959422] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926509, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.018405] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7da50505-ea0b-49ee-9813-ba32d5c677e9 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "a828caf9-2b61-4449-b1ee-25f0828380d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.449s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.125727] env[69994]: DEBUG nova.network.neutron [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1254.182241] env[69994]: DEBUG nova.network.neutron [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.272059] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926510, 'name': PowerOffVM_Task, 'duration_secs': 0.234638} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.272059] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1254.272873] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4299d9ae-c405-4c7d-9034-a3f52021414e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.292470] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02c715b-e416-4498-a2f7-f817c17eee31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.322759] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1254.323405] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b98c60de-4880-44f2-905e-f1eefca0a5d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.331275] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1254.331275] env[69994]: value = "task-2926512" [ 1254.331275] env[69994]: _type = "Task" [ 1254.331275] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.340408] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926512, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.453750] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926509, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.764251} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.454273] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 94169894-f772-41c9-95a1-ddf622f2c9f6/94169894-f772-41c9-95a1-ddf622f2c9f6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1254.454273] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1254.454543] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e59706d-ba1f-4e88-9e72-89fd35cabd18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.461804] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1254.461804] env[69994]: value = "task-2926513" [ 1254.461804] env[69994]: _type = "Task" [ 1254.461804] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.471878] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926513, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.685242] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Releasing lock "refresh_cache-2ae41965-d345-4358-92bc-7e43d81aca50" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1254.685694] env[69994]: DEBUG nova.compute.manager [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1254.685888] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1254.686788] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f063eea3-7088-4e88-811c-e0921d80bdab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.695358] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1254.695668] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf7812ef-d21e-4fa7-b929-55e14d07256f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.703537] env[69994]: DEBUG oslo_vmware.api [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1254.703537] env[69994]: value = "task-2926514" [ 1254.703537] env[69994]: _type = "Task" [ 1254.703537] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.713018] env[69994]: DEBUG oslo_vmware.api [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926514, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.842995] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1254.843364] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1254.843553] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.843801] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.844105] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1254.844425] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-716d2f41-890f-4deb-90ed-83cb58a82b07 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.855600] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1254.855797] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1254.856683] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff6cbc58-ed16-4067-9c23-e185cdcba618 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.862753] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1254.862753] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5255a477-0bd3-740f-27fd-d1154ead4586" [ 1254.862753] env[69994]: _type = "Task" [ 1254.862753] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.869972] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.870258] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.877607] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5255a477-0bd3-740f-27fd-d1154ead4586, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.971390] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926513, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066565} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.971606] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1254.972397] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd55890c-046a-4f74-bf7d-7e65a387ec98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.993834] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 94169894-f772-41c9-95a1-ddf622f2c9f6/94169894-f772-41c9-95a1-ddf622f2c9f6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1254.994081] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b98e0d21-96b1-4f17-b7b4-8afe1994e3c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.012970] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1255.012970] env[69994]: value = "task-2926515" [ 1255.012970] env[69994]: _type = "Task" [ 1255.012970] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.021239] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926515, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.214899] env[69994]: DEBUG oslo_vmware.api [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926514, 'name': PowerOffVM_Task, 'duration_secs': 0.231155} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.215278] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1255.215493] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1255.215780] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50576c38-b5a5-4bd2-9d5c-3db9963cedf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.240420] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1255.240596] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1255.240768] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Deleting the datastore file [datastore1] 2ae41965-d345-4358-92bc-7e43d81aca50 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1255.241051] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f986152d-e65d-4621-9c6c-10acc4d5c0e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.249279] env[69994]: DEBUG oslo_vmware.api [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1255.249279] env[69994]: value = "task-2926517" [ 1255.249279] env[69994]: _type = "Task" [ 1255.249279] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.257804] env[69994]: DEBUG oslo_vmware.api [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926517, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.376117] env[69994]: DEBUG nova.compute.manager [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1255.377956] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5255a477-0bd3-740f-27fd-d1154ead4586, 'name': SearchDatastore_Task, 'duration_secs': 0.022798} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.378935] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f249aaee-fb15-4422-bee3-0894cc484cb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.384954] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1255.384954] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52000da3-a0b7-c63b-22a2-5c7f8683e43b" [ 1255.384954] env[69994]: _type = "Task" [ 1255.384954] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.385983] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Volume attach. Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1255.386208] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587665', 'volume_id': 'f9bcf24a-fc94-4806-827d-76ea61241fca', 'name': 'volume-f9bcf24a-fc94-4806-827d-76ea61241fca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9bcf24a-fc94-4806-827d-76ea61241fca', 'serial': 'f9bcf24a-fc94-4806-827d-76ea61241fca'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1255.387060] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8190bb13-ba1a-4ef9-bdc2-c327e1c1197c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.408479] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc8d7c9-0cfd-441c-ba84-a19f5105c183 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.410825] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52000da3-a0b7-c63b-22a2-5c7f8683e43b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.436210] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] volume-f9bcf24a-fc94-4806-827d-76ea61241fca/volume-f9bcf24a-fc94-4806-827d-76ea61241fca.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1255.436672] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbdacb5e-b72c-492b-9fc1-24801412e2da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.455650] env[69994]: DEBUG oslo_vmware.api [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1255.455650] env[69994]: value = "task-2926518" [ 1255.455650] env[69994]: _type = "Task" [ 1255.455650] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.465889] env[69994]: DEBUG oslo_vmware.api [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926518, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.523634] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926515, 'name': ReconfigVM_Task, 'duration_secs': 0.278507} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.524494] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 94169894-f772-41c9-95a1-ddf622f2c9f6/94169894-f772-41c9-95a1-ddf622f2c9f6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1255.524610] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0fe41cb-6a94-44c3-9e43-bbeed984a4c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.533593] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1255.533593] env[69994]: value = "task-2926519" [ 1255.533593] env[69994]: _type = "Task" [ 1255.533593] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.542468] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926519, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.759783] env[69994]: DEBUG oslo_vmware.api [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926517, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127911} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.759952] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1255.760179] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1255.760396] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1255.760670] env[69994]: INFO nova.compute.manager [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1255.760938] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1255.761130] env[69994]: DEBUG nova.compute.manager [-] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1255.761229] env[69994]: DEBUG nova.network.neutron [-] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1255.778081] env[69994]: DEBUG nova.network.neutron [-] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1255.897704] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52000da3-a0b7-c63b-22a2-5c7f8683e43b, 'name': SearchDatastore_Task, 'duration_secs': 0.013988} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.898726] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.898971] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.900486] env[69994]: INFO nova.compute.claims [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1255.903047] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.903304] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 966e3672-f85b-467d-8821-1e14533ee629/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk. {{(pid=69994) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1255.903752] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9511ef8e-c9b3-4aa5-8274-fef8addeb503 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.913527] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1255.913527] env[69994]: value = "task-2926520" [ 1255.913527] env[69994]: _type = "Task" [ 1255.913527] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.922152] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926520, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.966120] env[69994]: DEBUG oslo_vmware.api [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926518, 'name': ReconfigVM_Task, 'duration_secs': 0.348883} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.966414] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Reconfigured VM instance instance-00000071 to attach disk [datastore2] volume-f9bcf24a-fc94-4806-827d-76ea61241fca/volume-f9bcf24a-fc94-4806-827d-76ea61241fca.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1255.971120] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef0fd699-8f80-4652-aaac-08bf47ba608f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.987415] env[69994]: DEBUG oslo_vmware.api [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1255.987415] env[69994]: value = "task-2926521" [ 1255.987415] env[69994]: _type = "Task" [ 1255.987415] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.996017] env[69994]: DEBUG oslo_vmware.api [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926521, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.043692] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926519, 'name': Rename_Task, 'duration_secs': 0.145961} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.043983] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1256.044241] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0ff820c-1d7d-4caa-b2dd-aed678f910fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.051945] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1256.051945] env[69994]: value = "task-2926522" [ 1256.051945] env[69994]: _type = "Task" [ 1256.051945] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.060616] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926522, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.280660] env[69994]: DEBUG nova.network.neutron [-] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.425363] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926520, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.501085] env[69994]: DEBUG oslo_vmware.api [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926521, 'name': ReconfigVM_Task, 'duration_secs': 0.144735} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.501507] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587665', 'volume_id': 'f9bcf24a-fc94-4806-827d-76ea61241fca', 'name': 'volume-f9bcf24a-fc94-4806-827d-76ea61241fca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9bcf24a-fc94-4806-827d-76ea61241fca', 'serial': 'f9bcf24a-fc94-4806-827d-76ea61241fca'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1256.566034] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926522, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.784096] env[69994]: INFO nova.compute.manager [-] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Took 1.02 seconds to deallocate network for instance. [ 1256.924595] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926520, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5245} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.924996] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 966e3672-f85b-467d-8821-1e14533ee629/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk. 
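The SearchDatastore_Task / CopyVirtualDisk_Task entries above follow the usual oslo.vmware invoke-then-poll pattern: a SOAP method ending in _Task returns a task reference, and wait_for_task() polls it, which is what produces the "Waiting for the task", "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern follows; the vCenter host, credentials and datastore paths are placeholders rather than values from this run, and the source/destination datacenter references that Nova's vm_util helper also passes are omitted for brevity.

    from oslo_vmware import api

    # Placeholder connection details -- not taken from this log.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    disk_mgr = session.vim.service_content.virtualDiskManager

    # Invoking a *_Task method returns a task reference; wait_for_task()
    # polls it until the task reports success or raises on failure.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/<image>/<image>.vmdk',
        destName='[datastore1] <instance-uuid>/<instance-uuid>.vmdk')
    task_info = session.wait_for_task(task)  # final task info on success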
[ 1256.925727] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dde3fe7-8e00-4e20-867b-a28a85854451 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.955108] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 966e3672-f85b-467d-8821-1e14533ee629/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1256.958070] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da10974d-b3a6-46bd-8bd2-dd774961adf5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.976562] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1256.976562] env[69994]: value = "task-2926525" [ 1256.976562] env[69994]: _type = "Task" [ 1256.976562] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.987288] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926525, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.063662] env[69994]: DEBUG oslo_vmware.api [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926522, 'name': PowerOnVM_Task, 'duration_secs': 0.866357} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.064794] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1257.065013] env[69994]: INFO nova.compute.manager [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Took 7.80 seconds to spawn the instance on the hypervisor. 
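The "Acquiring lock ... / Lock ... acquired ... waited / released ... held" lines that recur throughout (for example around "compute_resources") are emitted by oslo.concurrency's lockutils wrapper rather than by Nova itself. A minimal sketch of the two usual forms, assuming nothing beyond the lock name shown in the log:

    from oslo_concurrency import lockutils

    # Decorator form: every caller that uses the same lock name is serialized,
    # which is how the resource tracker guards its "compute_resources" state.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # mutate tracked resources while the named lock is held

    # Context-manager form for an ad-hoc critical section under the same name.
    with lockutils.lock('compute_resources'):
        pass  # waits until no other holder of the name remains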
[ 1257.065208] env[69994]: DEBUG nova.compute.manager [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1257.065970] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd55fac8-5199-496c-9f7a-f0be5d54a001 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.068878] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeab61a4-ffe0-4b47-b6ec-73109ff7a36e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.077465] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52216ee7-c6ac-4477-a5fd-e25aba247641 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.113235] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87836d51-8349-49f7-82e0-7bab5f516452 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.122334] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084683b9-88c4-48b1-8616-eb2d9c177aa7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.137435] env[69994]: DEBUG nova.compute.provider_tree [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1257.290814] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.486830] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926525, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.537988] env[69994]: DEBUG nova.objects.instance [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'flavor' on Instance uuid 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1257.590633] env[69994]: INFO nova.compute.manager [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Took 12.61 seconds to build instance. 
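The ProviderTree / scheduler report entries here, and the inventory record logged just below, compare the compute node's placement inventory (VCPU, MEMORY_MB and DISK_GB, each with total, reserved, min_unit, max_unit, step_size and allocation_ratio) against what Placement already holds. A small sketch of how such a record maps to schedulable capacity; the figures are copied from the logged inventory, while the helper itself is illustrative and assumes the usual (total - reserved) * allocation_ratio rule.

    # Inventory records as logged for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be.
    inventory = {
        'VCPU':      {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                      'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                      'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158,
                      'step_size': 1, 'allocation_ratio': 1.0},
    }

    def capacity(total, reserved, allocation_ratio, **_unused):
        # Usable capacity: overcommit applies after the reserved amount
        # is taken off the physical total.
        return int((total - reserved) * allocation_ratio)

    for rc, record in inventory.items():
        print(rc, capacity(**record))  # VCPU -> 192, MEMORY_MB -> 196078, DISK_GB -> 400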
[ 1257.640258] env[69994]: DEBUG nova.scheduler.client.report [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1257.990470] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926525, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.043488] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19a5294e-0877-4667-a9d0-24ca71a9fd27 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.273s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.092649] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3026fbb2-3c43-43e8-a048-3cac70a53fcc tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.114s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.147022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.246s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.147022] env[69994]: DEBUG nova.compute.manager [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1258.148371] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.858s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1258.148785] env[69994]: DEBUG nova.objects.instance [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lazy-loading 'resources' on Instance uuid 2ae41965-d345-4358-92bc-7e43d81aca50 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1258.382666] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1258.382666] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1258.493168] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926525, 'name': ReconfigVM_Task, 'duration_secs': 1.0619} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.498180] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 966e3672-f85b-467d-8821-1e14533ee629/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1258.498180] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ce4ebf-f2c9-4213-a2f3-5a88fa71ef22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.530219] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d5ce10f-5ba2-4d11-aee1-67d47970081e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.548803] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1258.548803] env[69994]: value = "task-2926527" [ 1258.548803] env[69994]: _type = "Task" [ 1258.548803] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.558176] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926527, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.566646] env[69994]: DEBUG nova.compute.manager [req-4689e6aa-8410-4c64-815f-9f0ea181f5b2 req-c61ea1f0-582b-40cd-97f9-f76a553aa0c3 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Received event network-changed-6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1258.566817] env[69994]: DEBUG nova.compute.manager [req-4689e6aa-8410-4c64-815f-9f0ea181f5b2 req-c61ea1f0-582b-40cd-97f9-f76a553aa0c3 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Refreshing instance network info cache due to event network-changed-6645c1c7-f316-403a-98aa-8b2cca92f8e4. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1258.568460] env[69994]: DEBUG oslo_concurrency.lockutils [req-4689e6aa-8410-4c64-815f-9f0ea181f5b2 req-c61ea1f0-582b-40cd-97f9-f76a553aa0c3 service nova] Acquiring lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.568460] env[69994]: DEBUG oslo_concurrency.lockutils [req-4689e6aa-8410-4c64-815f-9f0ea181f5b2 req-c61ea1f0-582b-40cd-97f9-f76a553aa0c3 service nova] Acquired lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1258.568460] env[69994]: DEBUG nova.network.neutron [req-4689e6aa-8410-4c64-815f-9f0ea181f5b2 req-c61ea1f0-582b-40cd-97f9-f76a553aa0c3 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Refreshing network info cache for port 6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1258.652525] env[69994]: DEBUG nova.compute.utils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1258.659488] env[69994]: DEBUG nova.compute.manager [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1258.659751] env[69994]: DEBUG nova.network.neutron [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1258.712253] env[69994]: DEBUG nova.policy [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8a8bcbbe1454049982f693dbfa19790', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c545eb835008401ab8672be30dbcdad9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1258.818895] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd665eb9-7424-4625-80dd-c199427d2b59 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.826670] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35243af3-adbc-4da5-8d9d-45dbd84ba4c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.857241] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e875148a-9f30-4b91-8f61-7100503ca594 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.866272] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79249a43-38b3-419a-a122-74b6f69c6193 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.882715] env[69994]: DEBUG nova.compute.provider_tree [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1258.884497] env[69994]: INFO nova.compute.manager [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Detaching volume c28f1025-5a78-457c-a46f-684978ccb6d9 [ 1258.920623] env[69994]: INFO nova.virt.block_device [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Attempting to driver detach volume c28f1025-5a78-457c-a46f-684978ccb6d9 from mountpoint /dev/sdb [ 1258.921033] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] 
Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1258.921254] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587659', 'volume_id': 'c28f1025-5a78-457c-a46f-684978ccb6d9', 'name': 'volume-c28f1025-5a78-457c-a46f-684978ccb6d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e', 'attached_at': '', 'detached_at': '', 'volume_id': 'c28f1025-5a78-457c-a46f-684978ccb6d9', 'serial': 'c28f1025-5a78-457c-a46f-684978ccb6d9'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1258.922206] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3962da6-b1bb-4dc9-b061-5cd1cce3e523 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.951031] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92f7796-bc1c-41aa-97c4-049472c22d2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.956171] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93576722-b5e4-4153-aee0-25ce5f9273c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.979795] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33991c71-3ec1-4cf7-8069-d0d7358183d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.995587] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] The volume has not been displaced from its original location: [datastore1] volume-c28f1025-5a78-457c-a46f-684978ccb6d9/volume-c28f1025-5a78-457c-a46f-684978ccb6d9.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1259.000853] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1259.001503] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b411364-9d78-4679-832c-44d17d3efa44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.020424] env[69994]: DEBUG oslo_vmware.api [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1259.020424] env[69994]: value = "task-2926529" [ 1259.020424] env[69994]: _type = "Task" [ 1259.020424] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.024254] env[69994]: DEBUG nova.network.neutron [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Successfully created port: 090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1259.031947] env[69994]: DEBUG oslo_vmware.api [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926529, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.058848] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926527, 'name': ReconfigVM_Task, 'duration_secs': 0.179012} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.059063] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1259.059335] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86d3835b-71e1-4533-befd-9958bbbf86ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.073191] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1259.073191] env[69994]: value = "task-2926530" [ 1259.073191] env[69994]: _type = "Task" [ 1259.073191] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.085160] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926530, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.166698] env[69994]: DEBUG nova.compute.manager [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1259.348953] env[69994]: DEBUG nova.network.neutron [req-4689e6aa-8410-4c64-815f-9f0ea181f5b2 req-c61ea1f0-582b-40cd-97f9-f76a553aa0c3 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Updated VIF entry in instance network info cache for port 6645c1c7-f316-403a-98aa-8b2cca92f8e4. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1259.349456] env[69994]: DEBUG nova.network.neutron [req-4689e6aa-8410-4c64-815f-9f0ea181f5b2 req-c61ea1f0-582b-40cd-97f9-f76a553aa0c3 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Updating instance_info_cache with network_info: [{"id": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "address": "fa:16:3e:98:5a:72", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6645c1c7-f3", "ovs_interfaceid": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.388066] env[69994]: DEBUG nova.scheduler.client.report [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1259.533745] env[69994]: DEBUG oslo_vmware.api [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926529, 'name': ReconfigVM_Task, 'duration_secs': 0.361237} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.537062] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1259.539695] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37f0b505-e665-4f09-9314-a34bf79d899c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.556603] env[69994]: DEBUG oslo_vmware.api [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1259.556603] env[69994]: value = "task-2926531" [ 1259.556603] env[69994]: _type = "Task" [ 1259.556603] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.566915] env[69994]: DEBUG oslo_vmware.api [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926531, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.584407] env[69994]: DEBUG oslo_vmware.api [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926530, 'name': PowerOnVM_Task, 'duration_secs': 0.429066} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.584700] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1259.587822] env[69994]: DEBUG nova.compute.manager [None req-bec274bd-b79a-4617-9c28-416e9ce1c285 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1259.588481] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac11ea3d-8c94-4373-8dbf-5aaf6a55e248 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.853713] env[69994]: DEBUG oslo_concurrency.lockutils [req-4689e6aa-8410-4c64-815f-9f0ea181f5b2 req-c61ea1f0-582b-40cd-97f9-f76a553aa0c3 service nova] Releasing lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1259.895105] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.747s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.900834] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "328868f0-2fe9-4c04-a669-54b073c53b14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1259.900834] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "328868f0-2fe9-4c04-a669-54b073c53b14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.920888] env[69994]: INFO nova.scheduler.client.report [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Deleted allocations for instance 2ae41965-d345-4358-92bc-7e43d81aca50 [ 1260.067658] env[69994]: DEBUG oslo_vmware.api [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926531, 'name': ReconfigVM_Task, 'duration_secs': 0.243273} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.068016] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587659', 'volume_id': 'c28f1025-5a78-457c-a46f-684978ccb6d9', 'name': 'volume-c28f1025-5a78-457c-a46f-684978ccb6d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e', 'attached_at': '', 'detached_at': '', 'volume_id': 'c28f1025-5a78-457c-a46f-684978ccb6d9', 'serial': 'c28f1025-5a78-457c-a46f-684978ccb6d9'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1260.176977] env[69994]: DEBUG nova.compute.manager [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1260.205023] env[69994]: DEBUG nova.virt.hardware [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1260.205162] env[69994]: DEBUG nova.virt.hardware [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1260.205245] env[69994]: DEBUG nova.virt.hardware [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1260.205414] env[69994]: DEBUG nova.virt.hardware [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1260.205564] env[69994]: DEBUG nova.virt.hardware [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 
tempest-ServerActionsTestOtherB-1022362737-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1260.205714] env[69994]: DEBUG nova.virt.hardware [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1260.205921] env[69994]: DEBUG nova.virt.hardware [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1260.206545] env[69994]: DEBUG nova.virt.hardware [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1260.207182] env[69994]: DEBUG nova.virt.hardware [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1260.207182] env[69994]: DEBUG nova.virt.hardware [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1260.207182] env[69994]: DEBUG nova.virt.hardware [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1260.208027] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c052ca5-1574-4c26-87ee-9e455f11c23e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.220339] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe4604c-4be7-4f4e-af2f-b718c96392e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.405065] env[69994]: DEBUG nova.compute.manager [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1260.427903] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fee52f9-7bfa-442f-b6de-6fe6f4aac020 tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "2ae41965-d345-4358-92bc-7e43d81aca50" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.329s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.545016] env[69994]: DEBUG nova.network.neutron [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Successfully updated port: 090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1260.591952] env[69994]: DEBUG nova.compute.manager [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Received event network-vif-plugged-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1260.592185] env[69994]: DEBUG oslo_concurrency.lockutils [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] Acquiring lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.592372] env[69994]: DEBUG oslo_concurrency.lockutils [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.592555] env[69994]: DEBUG oslo_concurrency.lockutils [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.592717] env[69994]: DEBUG nova.compute.manager [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] No waiting events found dispatching network-vif-plugged-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1260.592883] env[69994]: WARNING nova.compute.manager [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Received unexpected event network-vif-plugged-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 for instance with vm_state building and task_state spawning. 
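The recurring "Task: {'id': task-..., 'name': ReconfigVM_Task} progress is N%" and "completed successfully" entries are produced by oslo.vmware while it polls vCenter tasks to completion. A rough sketch of that call pattern is below; the connection details, VM reference and reconfigure spec are placeholders, and this only illustrates the library usage the log reflects, not Nova's own driver code.

    from oslo_vmware import api as vmware_api

    # Placeholder credentials; in Nova these come from the [vmware] section of
    # nova.conf. By default the constructor also logs into vCenter.
    session = vmware_api.VMwareAPISession(
        "vc.example.com", "user", "secret",   # host, username, password (assumed)
        10,                                    # api_retry_count
        0.5)                                   # task_poll_interval (seconds)

    vm_ref = None         # managed-object reference of the VM (placeholder)
    reconfig_spec = None  # vim.vm.ConfigSpec built elsewhere (placeholder)

    # invoke_api() issues the SOAP call (here ReconfigVM_Task) and returns a
    # task reference; wait_for_task() polls it, logging progress lines like
    # those above, and raises if the task finishes in an error state.
    task = session.invoke_api(session.vim, "ReconfigVM_Task", vm_ref,
                              spec=reconfig_spec)
    session.wait_for_task(task)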
[ 1260.593056] env[69994]: DEBUG nova.compute.manager [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Received event network-changed-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1260.593215] env[69994]: DEBUG nova.compute.manager [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Refreshing instance network info cache due to event network-changed-090b588e-3c97-4b85-b16b-0a1f4c7e4b18. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1260.593395] env[69994]: DEBUG oslo_concurrency.lockutils [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] Acquiring lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1260.593528] env[69994]: DEBUG oslo_concurrency.lockutils [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] Acquired lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1260.593704] env[69994]: DEBUG nova.network.neutron [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Refreshing network info cache for port 090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1260.607931] env[69994]: DEBUG nova.objects.instance [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'flavor' on Instance uuid 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1260.715568] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "9d915860-6789-4574-b30f-a7998c07b53e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.715568] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "9d915860-6789-4574-b30f-a7998c07b53e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.715821] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "9d915860-6789-4574-b30f-a7998c07b53e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.716109] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "9d915860-6789-4574-b30f-a7998c07b53e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.716696] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "9d915860-6789-4574-b30f-a7998c07b53e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.719380] env[69994]: INFO nova.compute.manager [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Terminating instance [ 1260.926779] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.927064] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.928816] env[69994]: INFO nova.compute.claims [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1261.047560] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.128042] env[69994]: DEBUG nova.network.neutron [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1261.199141] env[69994]: DEBUG nova.network.neutron [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.224349] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "refresh_cache-9d915860-6789-4574-b30f-a7998c07b53e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.224530] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquired lock "refresh_cache-9d915860-6789-4574-b30f-a7998c07b53e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.224711] env[69994]: DEBUG nova.network.neutron [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1261.613927] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82767c94-64d9-471a-ab90-fccf79724741 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.232s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.648454] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.648659] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.649959] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "ba823cb8-570b-465f-a566-524b82ebc1ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.650180] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock 
"ba823cb8-570b-465f-a566-524b82ebc1ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.701535] env[69994]: DEBUG oslo_concurrency.lockutils [req-93ce1fe6-0b62-4d20-9991-beb69daa9891 req-ec04299d-edef-4af9-97f6-92c5903b0f47 service nova] Releasing lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1261.701997] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.702235] env[69994]: DEBUG nova.network.neutron [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1261.747352] env[69994]: DEBUG nova.network.neutron [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1261.806488] env[69994]: DEBUG nova.network.neutron [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.085974] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6549cb56-e9ad-4fee-a919-ff3f823e3661 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.095620] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7018dd-47a9-4de9-b0e0-03e9613de480 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.126576] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c6a0d3-4609-4ea1-93f1-1ff7db73cafb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.134918] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd55373-1611-4930-a232-39b5c2f89343 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.149648] env[69994]: DEBUG nova.compute.provider_tree [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1262.152107] env[69994]: INFO nova.compute.manager [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Detaching volume f9bcf24a-fc94-4806-827d-76ea61241fca [ 1262.153969] env[69994]: DEBUG nova.compute.manager [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1262.194063] env[69994]: INFO nova.virt.block_device [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Attempting to driver detach volume f9bcf24a-fc94-4806-827d-76ea61241fca from mountpoint /dev/sdc [ 1262.194322] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1262.194511] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587665', 'volume_id': 'f9bcf24a-fc94-4806-827d-76ea61241fca', 'name': 'volume-f9bcf24a-fc94-4806-827d-76ea61241fca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9bcf24a-fc94-4806-827d-76ea61241fca', 'serial': 'f9bcf24a-fc94-4806-827d-76ea61241fca'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1262.197397] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6afdb5-a4b5-4a54-a0d4-51e11104f8a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.223713] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1417465-9924-46f1-9589-28a4363959d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.232720] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2657be1b-4ea6-4635-be12-7e330d8f714d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.255248] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe3ca17-4fcd-4b95-b410-1462fb7c210e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.258614] env[69994]: DEBUG nova.network.neutron [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 
tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1262.272754] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] The volume has not been displaced from its original location: [datastore2] volume-f9bcf24a-fc94-4806-827d-76ea61241fca/volume-f9bcf24a-fc94-4806-827d-76ea61241fca.vmdk. No consolidation needed. {{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1262.278063] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Reconfiguring VM instance instance-00000071 to detach disk 2002 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1262.280517] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d131990-a4b9-4664-adfe-a067aed2428d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.300835] env[69994]: DEBUG oslo_vmware.api [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1262.300835] env[69994]: value = "task-2926533" [ 1262.300835] env[69994]: _type = "Task" [ 1262.300835] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.309565] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Releasing lock "refresh_cache-9d915860-6789-4574-b30f-a7998c07b53e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1262.309989] env[69994]: DEBUG nova.compute.manager [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1262.310204] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1262.310486] env[69994]: DEBUG oslo_vmware.api [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926533, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.311242] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ed740f-54b7-448a-b94b-c80871c951f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.319315] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1262.319576] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-affc45a5-9c8b-4595-9a41-e4767f4588fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.328509] env[69994]: DEBUG oslo_vmware.api [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1262.328509] env[69994]: value = "task-2926534" [ 1262.328509] env[69994]: _type = "Task" [ 1262.328509] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.338489] env[69994]: DEBUG oslo_vmware.api [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926534, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.417381] env[69994]: DEBUG nova.network.neutron [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Updating instance_info_cache with network_info: [{"id": "090b588e-3c97-4b85-b16b-0a1f4c7e4b18", "address": "fa:16:3e:bf:22:a2", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap090b588e-3c", "ovs_interfaceid": "090b588e-3c97-4b85-b16b-0a1f4c7e4b18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.653027] env[69994]: DEBUG nova.scheduler.client.report [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 
tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1262.671329] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.812299] env[69994]: DEBUG oslo_vmware.api [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926533, 'name': ReconfigVM_Task, 'duration_secs': 0.294791} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.812299] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Reconfigured VM instance instance-00000071 to detach disk 2002 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1262.816575] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2432ce10-a345-475a-a11c-8762311a7343 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.836516] env[69994]: DEBUG oslo_vmware.api [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1262.836516] env[69994]: value = "task-2926536" [ 1262.836516] env[69994]: _type = "Task" [ 1262.836516] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.842515] env[69994]: DEBUG oslo_vmware.api [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926534, 'name': PowerOffVM_Task, 'duration_secs': 0.161535} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.843205] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1262.843205] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1262.843426] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-605bac5d-5618-4341-ab76-d08e5f20676c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.850513] env[69994]: DEBUG oslo_vmware.api [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926536, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.871094] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1262.871377] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1262.871643] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Deleting the datastore file [datastore1] 9d915860-6789-4574-b30f-a7998c07b53e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1262.871943] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c254017-0c99-4359-8a7d-b528690d2cf0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.879773] env[69994]: DEBUG oslo_vmware.api [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for the task: (returnval){ [ 1262.879773] env[69994]: value = "task-2926538" [ 1262.879773] env[69994]: _type = "Task" [ 1262.879773] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.888330] env[69994]: DEBUG oslo_vmware.api [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926538, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.920426] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1262.920758] env[69994]: DEBUG nova.compute.manager [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Instance network_info: |[{"id": "090b588e-3c97-4b85-b16b-0a1f4c7e4b18", "address": "fa:16:3e:bf:22:a2", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap090b588e-3c", "ovs_interfaceid": "090b588e-3c97-4b85-b16b-0a1f4c7e4b18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1262.921274] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:22:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '090b588e-3c97-4b85-b16b-0a1f4c7e4b18', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1262.931181] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1262.931521] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1262.932383] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4775cbc1-e010-44f6-b7ea-0a481640b618 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.954022] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1262.954022] env[69994]: value = "task-2926539" [ 1262.954022] env[69994]: _type = "Task" [ 1262.954022] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.963132] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926539, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.159431] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.231s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.159431] env[69994]: DEBUG nova.compute.manager [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1263.162130] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.491s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.163545] env[69994]: INFO nova.compute.claims [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1263.347446] env[69994]: DEBUG oslo_vmware.api [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926536, 'name': ReconfigVM_Task, 'duration_secs': 0.154772} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.347775] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587665', 'volume_id': 'f9bcf24a-fc94-4806-827d-76ea61241fca', 'name': 'volume-f9bcf24a-fc94-4806-827d-76ea61241fca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9bcf24a-fc94-4806-827d-76ea61241fca', 'serial': 'f9bcf24a-fc94-4806-827d-76ea61241fca'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1263.391676] env[69994]: DEBUG oslo_vmware.api [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Task: {'id': task-2926538, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145935} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.391955] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1263.392158] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1263.392341] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1263.392610] env[69994]: INFO nova.compute.manager [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1263.392903] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1263.393202] env[69994]: DEBUG nova.compute.manager [-] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1263.393262] env[69994]: DEBUG nova.network.neutron [-] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1263.422100] env[69994]: DEBUG nova.network.neutron [-] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1263.465186] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926539, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.668536] env[69994]: DEBUG nova.compute.utils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1263.670220] env[69994]: DEBUG nova.compute.manager [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1263.670456] env[69994]: DEBUG nova.network.neutron [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1263.716811] env[69994]: DEBUG nova.policy [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56900b2a71cc423a868f3c1b81f70172', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2417f6585042417c95491eb3d7cba343', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1263.891908] env[69994]: DEBUG nova.objects.instance [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'flavor' on Instance uuid 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1263.924646] env[69994]: DEBUG nova.network.neutron [-] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.965641] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926539, 'name': CreateVM_Task, 'duration_secs': 0.547781} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.965777] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1263.966471] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1263.966635] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1263.966946] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1263.967496] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f733f941-9a47-402c-9d58-c5e2a34c6da6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.972839] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1263.972839] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5265333c-a5a8-6d4a-d281-10ac420c8e94" [ 1263.972839] env[69994]: _type = "Task" [ 1263.972839] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.981228] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5265333c-a5a8-6d4a-d281-10ac420c8e94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.993631] env[69994]: DEBUG nova.network.neutron [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Successfully created port: e3759990-8ec1-401b-9393-767859b0a13f {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1264.175656] env[69994]: DEBUG nova.compute.manager [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1264.316575] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a6a41c-1b1f-45e0-b2f0-f782b8814c39 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.325069] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446a22a5-94e2-4631-8cbd-a76f4b362f50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.356739] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40725fb7-93e3-49ef-a794-85440a38f5bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.365294] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f2ce41-c7e9-4602-9e9d-79f83745fd54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.379143] env[69994]: DEBUG nova.compute.provider_tree [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1264.426541] env[69994]: INFO nova.compute.manager [-] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Took 1.03 seconds to deallocate network for instance. [ 1264.484241] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5265333c-a5a8-6d4a-d281-10ac420c8e94, 'name': SearchDatastore_Task, 'duration_secs': 0.01131} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.484532] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1264.484793] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1264.485090] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.485271] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1264.485473] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1264.485789] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77a96adf-6087-4344-8c62-9d1a03cf1e1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.495572] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1264.495572] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1264.496205] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8b44bc1-71ae-4fcd-b0f7-8dda1e439b5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.501767] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1264.501767] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528508a5-e480-965a-2235-1b51740b2ed1" [ 1264.501767] env[69994]: _type = "Task" [ 1264.501767] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.511011] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528508a5-e480-965a-2235-1b51740b2ed1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.882316] env[69994]: DEBUG nova.scheduler.client.report [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1264.897786] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d0bd6f19-a549-4a4a-92c1-11998628faf8 tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.249s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.932647] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.012376] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528508a5-e480-965a-2235-1b51740b2ed1, 'name': SearchDatastore_Task, 'duration_secs': 0.009763} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.013170] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58cc8ceb-a925-4421-b6fa-3923062e3ce2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.019292] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1265.019292] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5278a6c4-6a49-290a-5888-a7f694ecee45" [ 1265.019292] env[69994]: _type = "Task" [ 1265.019292] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.027683] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5278a6c4-6a49-290a-5888-a7f694ecee45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.189696] env[69994]: DEBUG nova.compute.manager [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1265.217222] env[69994]: DEBUG nova.virt.hardware [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1265.218030] env[69994]: DEBUG nova.virt.hardware [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1265.218030] env[69994]: DEBUG nova.virt.hardware [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1265.218030] env[69994]: DEBUG nova.virt.hardware [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 
tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1265.218030] env[69994]: DEBUG nova.virt.hardware [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1265.218221] env[69994]: DEBUG nova.virt.hardware [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1265.218372] env[69994]: DEBUG nova.virt.hardware [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1265.218539] env[69994]: DEBUG nova.virt.hardware [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1265.218695] env[69994]: DEBUG nova.virt.hardware [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1265.218950] env[69994]: DEBUG nova.virt.hardware [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1265.219188] env[69994]: DEBUG nova.virt.hardware [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1265.220103] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e06ff0-3392-49b7-ba31-3ed754800b71 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.229351] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3b38c7-9c5e-4ea1-8160-0ca892ffe2e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.380197] env[69994]: DEBUG nova.compute.manager [req-6300f22c-ef2d-4bd6-866a-e6b9a670ec89 req-2d23a2bd-1b87-48f1-9fbd-52538c33950f service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Received event network-vif-plugged-e3759990-8ec1-401b-9393-767859b0a13f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1265.380197] env[69994]: 
DEBUG oslo_concurrency.lockutils [req-6300f22c-ef2d-4bd6-866a-e6b9a670ec89 req-2d23a2bd-1b87-48f1-9fbd-52538c33950f service nova] Acquiring lock "328868f0-2fe9-4c04-a669-54b073c53b14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.380197] env[69994]: DEBUG oslo_concurrency.lockutils [req-6300f22c-ef2d-4bd6-866a-e6b9a670ec89 req-2d23a2bd-1b87-48f1-9fbd-52538c33950f service nova] Lock "328868f0-2fe9-4c04-a669-54b073c53b14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.380197] env[69994]: DEBUG oslo_concurrency.lockutils [req-6300f22c-ef2d-4bd6-866a-e6b9a670ec89 req-2d23a2bd-1b87-48f1-9fbd-52538c33950f service nova] Lock "328868f0-2fe9-4c04-a669-54b073c53b14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.380197] env[69994]: DEBUG nova.compute.manager [req-6300f22c-ef2d-4bd6-866a-e6b9a670ec89 req-2d23a2bd-1b87-48f1-9fbd-52538c33950f service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] No waiting events found dispatching network-vif-plugged-e3759990-8ec1-401b-9393-767859b0a13f {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1265.380197] env[69994]: WARNING nova.compute.manager [req-6300f22c-ef2d-4bd6-866a-e6b9a670ec89 req-2d23a2bd-1b87-48f1-9fbd-52538c33950f service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Received unexpected event network-vif-plugged-e3759990-8ec1-401b-9393-767859b0a13f for instance with vm_state building and task_state spawning. [ 1265.387194] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.225s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.387652] env[69994]: DEBUG nova.compute.manager [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1265.390325] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.458s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.390556] env[69994]: DEBUG nova.objects.instance [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lazy-loading 'resources' on Instance uuid 9d915860-6789-4574-b30f-a7998c07b53e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1265.461960] env[69994]: DEBUG nova.network.neutron [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Successfully updated port: e3759990-8ec1-401b-9393-767859b0a13f {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1265.531737] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5278a6c4-6a49-290a-5888-a7f694ecee45, 'name': SearchDatastore_Task, 'duration_secs': 0.010915} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.531737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.531898] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] ead967bc-ba1d-4c3c-8dbb-e284b444ffcd/ead967bc-ba1d-4c3c-8dbb-e284b444ffcd.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1265.532140] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98002365-ed18-4085-8028-ca39b8fa0a94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.541624] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1265.541624] env[69994]: value = "task-2926540" [ 1265.541624] env[69994]: _type = "Task" [ 1265.541624] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.550421] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926540, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.852674] env[69994]: DEBUG oslo_concurrency.lockutils [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.852924] env[69994]: DEBUG oslo_concurrency.lockutils [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.893106] env[69994]: DEBUG nova.compute.utils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1265.897191] env[69994]: DEBUG nova.compute.manager [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1265.897395] env[69994]: DEBUG nova.network.neutron [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1265.952973] env[69994]: DEBUG nova.policy [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06c03724e54f489c89ac5068010cf291', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '659795e8bd77484fa20f48d704d113a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1265.966918] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-328868f0-2fe9-4c04-a669-54b073c53b14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.967163] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-328868f0-2fe9-4c04-a669-54b073c53b14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1265.967258] env[69994]: DEBUG nova.network.neutron [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1266.052122] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926540, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480979} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.054708] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] ead967bc-ba1d-4c3c-8dbb-e284b444ffcd/ead967bc-ba1d-4c3c-8dbb-e284b444ffcd.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1266.054999] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1266.055452] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-38cf0f2a-8616-4422-8fcd-3c407b9cc7b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.062521] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.062756] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.062955] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.063191] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.063321] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.065034] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1266.065034] env[69994]: value = "task-2926541" [ 1266.065034] env[69994]: _type = "Task" [ 1266.065034] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.066362] env[69994]: INFO nova.compute.manager [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Terminating instance [ 1266.068282] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07431e2-9802-4cc8-a653-88c3a0e3d651 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.080672] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926541, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.084040] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7380107-fa59-4772-b309-de2c8299778c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.115628] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500c9397-33d5-4261-aa5e-2d7aa27663f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.124099] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b205f23-baa1-46a8-a393-6351a72af5db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.140898] env[69994]: DEBUG nova.compute.provider_tree [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1266.224445] env[69994]: DEBUG nova.network.neutron [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Successfully created port: f906f838-ecf7-49d9-9645-270f550c7083 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1266.355705] env[69994]: DEBUG nova.compute.manager [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1266.401538] env[69994]: DEBUG nova.compute.manager [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1266.503991] env[69994]: DEBUG nova.network.neutron [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1266.578073] env[69994]: DEBUG nova.compute.manager [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1266.578302] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1266.578974] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926541, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.234636} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.579731] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9f4665-76d4-417d-88b9-cc95851ed40f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.582549] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1266.583334] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c76a207-6e54-4909-8f23-0a6ba333eba5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.620743] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] ead967bc-ba1d-4c3c-8dbb-e284b444ffcd/ead967bc-ba1d-4c3c-8dbb-e284b444ffcd.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1266.620743] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1266.620997] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-faccebec-897d-4c1a-8f11-125f3a1bc8a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.641352] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89a6522a-0f9c-441d-a880-bf311b648b69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.649315] env[69994]: DEBUG nova.scheduler.client.report [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1266.659476] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1266.659476] env[69994]: value = "task-2926543" [ 1266.659476] env[69994]: _type = "Task" [ 1266.659476] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.661276] env[69994]: DEBUG oslo_vmware.api [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1266.661276] env[69994]: value = "task-2926542" [ 1266.661276] env[69994]: _type = "Task" [ 1266.661276] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.679089] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926543, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.683918] env[69994]: DEBUG oslo_vmware.api [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926542, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.701790] env[69994]: DEBUG nova.network.neutron [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Updating instance_info_cache with network_info: [{"id": "e3759990-8ec1-401b-9393-767859b0a13f", "address": "fa:16:3e:94:56:71", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3759990-8e", "ovs_interfaceid": "e3759990-8ec1-401b-9393-767859b0a13f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.881299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1267.155915] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.765s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.158291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.277s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1267.159863] env[69994]: INFO nova.compute.claims [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1267.175244] env[69994]: DEBUG oslo_vmware.api [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926542, 'name': PowerOffVM_Task, 'duration_secs': 0.275124} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.178897] env[69994]: INFO nova.scheduler.client.report [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Deleted allocations for instance 9d915860-6789-4574-b30f-a7998c07b53e [ 1267.183434] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1267.183583] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1267.184009] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926543, 'name': ReconfigVM_Task, 'duration_secs': 0.48557} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.184228] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93c6e981-4e59-480d-a3dd-77d8a12ada4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.185836] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Reconfigured VM instance instance-00000077 to attach disk [datastore2] ead967bc-ba1d-4c3c-8dbb-e284b444ffcd/ead967bc-ba1d-4c3c-8dbb-e284b444ffcd.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1267.186421] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74876ef7-9996-457f-8170-4cb1cee86f13 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.195330] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1267.195330] env[69994]: value = "task-2926544" [ 1267.195330] env[69994]: _type = "Task" [ 1267.195330] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.206553] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-328868f0-2fe9-4c04-a669-54b073c53b14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1267.206851] env[69994]: DEBUG nova.compute.manager [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Instance network_info: |[{"id": "e3759990-8ec1-401b-9393-767859b0a13f", "address": "fa:16:3e:94:56:71", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3759990-8e", "ovs_interfaceid": "e3759990-8ec1-401b-9393-767859b0a13f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1267.207139] 
env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926544, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.208220] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:56:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c5652322-9f10-4996-baed-4c0aa13a1b4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3759990-8ec1-401b-9393-767859b0a13f', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1267.215768] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1267.216236] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1267.216473] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d337eff-b40a-418d-b2f3-230a93e81996 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.237329] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1267.237329] env[69994]: value = "task-2926546" [ 1267.237329] env[69994]: _type = "Task" [ 1267.237329] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.246602] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926546, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.273278] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1267.273566] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1267.273794] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Deleting the datastore file [datastore2] 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1267.274123] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed5a5897-8361-475e-aaff-bb882a288c40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.281701] env[69994]: DEBUG oslo_vmware.api [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for the task: (returnval){ [ 1267.281701] env[69994]: value = "task-2926547" [ 1267.281701] env[69994]: _type = "Task" [ 1267.281701] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.290339] env[69994]: DEBUG oslo_vmware.api [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926547, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.404477] env[69994]: DEBUG nova.compute.manager [req-3e754261-1126-4ac6-bc27-49b1ba001376 req-be21b3e5-b0d0-44f2-8f75-10e055f1b840 service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Received event network-changed-e3759990-8ec1-401b-9393-767859b0a13f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1267.404636] env[69994]: DEBUG nova.compute.manager [req-3e754261-1126-4ac6-bc27-49b1ba001376 req-be21b3e5-b0d0-44f2-8f75-10e055f1b840 service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Refreshing instance network info cache due to event network-changed-e3759990-8ec1-401b-9393-767859b0a13f. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1267.404852] env[69994]: DEBUG oslo_concurrency.lockutils [req-3e754261-1126-4ac6-bc27-49b1ba001376 req-be21b3e5-b0d0-44f2-8f75-10e055f1b840 service nova] Acquiring lock "refresh_cache-328868f0-2fe9-4c04-a669-54b073c53b14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.404997] env[69994]: DEBUG oslo_concurrency.lockutils [req-3e754261-1126-4ac6-bc27-49b1ba001376 req-be21b3e5-b0d0-44f2-8f75-10e055f1b840 service nova] Acquired lock "refresh_cache-328868f0-2fe9-4c04-a669-54b073c53b14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1267.405344] env[69994]: DEBUG nova.network.neutron [req-3e754261-1126-4ac6-bc27-49b1ba001376 req-be21b3e5-b0d0-44f2-8f75-10e055f1b840 service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Refreshing network info cache for port e3759990-8ec1-401b-9393-767859b0a13f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1267.412314] env[69994]: DEBUG nova.compute.manager [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1267.438767] env[69994]: DEBUG nova.virt.hardware [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1267.439104] env[69994]: DEBUG nova.virt.hardware [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1267.439288] env[69994]: DEBUG nova.virt.hardware [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1267.439502] env[69994]: DEBUG nova.virt.hardware [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1267.439625] env[69994]: DEBUG nova.virt.hardware [None 
req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1267.439777] env[69994]: DEBUG nova.virt.hardware [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1267.440063] env[69994]: DEBUG nova.virt.hardware [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1267.440248] env[69994]: DEBUG nova.virt.hardware [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1267.440424] env[69994]: DEBUG nova.virt.hardware [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1267.440591] env[69994]: DEBUG nova.virt.hardware [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1267.440768] env[69994]: DEBUG nova.virt.hardware [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1267.441954] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf7fad1-1d56-4847-ba8a-837de5d26a7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.450796] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48014c15-c02f-4c1d-ba69-3717ec417570 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.687711] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b517e53-062d-4a3c-90c3-d92507db1cbe tempest-ServerShowV247Test-1109353368 tempest-ServerShowV247Test-1109353368-project-member] Lock "9d915860-6789-4574-b30f-a7998c07b53e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.972s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.706576] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 
tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926544, 'name': Rename_Task, 'duration_secs': 0.181399} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.706894] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1267.707183] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81056eba-b494-48b4-97b9-15f9b03ae007 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.717203] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1267.717203] env[69994]: value = "task-2926548" [ 1267.717203] env[69994]: _type = "Task" [ 1267.717203] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.726831] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926548, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.747842] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926546, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.793158] env[69994]: DEBUG oslo_vmware.api [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Task: {'id': task-2926547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.423223} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.793542] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1267.793792] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1267.794017] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1267.794256] env[69994]: INFO nova.compute.manager [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1267.794546] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1267.794811] env[69994]: DEBUG nova.compute.manager [-] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1267.794962] env[69994]: DEBUG nova.network.neutron [-] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1267.935056] env[69994]: DEBUG nova.network.neutron [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Successfully updated port: f906f838-ecf7-49d9-9645-270f550c7083 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1268.231557] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926548, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.247432] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926546, 'name': CreateVM_Task, 'duration_secs': 0.527298} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.249934] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1268.250852] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.250991] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1268.251323] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1268.251607] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-143a0ba8-a7a8-4a77-856d-ef42ddb55fed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.257405] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1268.257405] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52991e44-9df2-1d9c-2b70-aef2264fc141" [ 1268.257405] env[69994]: _type = "Task" [ 1268.257405] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.270045] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52991e44-9df2-1d9c-2b70-aef2264fc141, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.282365] env[69994]: DEBUG nova.network.neutron [req-3e754261-1126-4ac6-bc27-49b1ba001376 req-be21b3e5-b0d0-44f2-8f75-10e055f1b840 service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Updated VIF entry in instance network info cache for port e3759990-8ec1-401b-9393-767859b0a13f. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1268.282736] env[69994]: DEBUG nova.network.neutron [req-3e754261-1126-4ac6-bc27-49b1ba001376 req-be21b3e5-b0d0-44f2-8f75-10e055f1b840 service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Updating instance_info_cache with network_info: [{"id": "e3759990-8ec1-401b-9393-767859b0a13f", "address": "fa:16:3e:94:56:71", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3759990-8e", "ovs_interfaceid": "e3759990-8ec1-401b-9393-767859b0a13f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.328369] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549fa10b-9c1e-4221-bd09-34023cf3b962 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.338851] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd1c81b-3145-4f42-86ab-da8be5aedb62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.374968] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3438682e-7247-4355-912b-4675a114df79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.384360] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071b3921-cdbf-408d-9363-596955f004b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.401410] env[69994]: DEBUG nova.compute.provider_tree [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.438042] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "refresh_cache-ba823cb8-570b-465f-a566-524b82ebc1ba" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.438330] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquired lock "refresh_cache-ba823cb8-570b-465f-a566-524b82ebc1ba" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1268.438502] env[69994]: DEBUG nova.network.neutron [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1268.508281] env[69994]: DEBUG nova.compute.manager [req-8fa83717-93f0-4b84-85cd-1d6482b1c0e0 req-eec30d11-6259-4587-8591-f507e7f14c90 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Received event network-vif-deleted-e4d88c42-18f3-404a-8d4a-68852d25e55f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1268.508697] env[69994]: INFO nova.compute.manager [req-8fa83717-93f0-4b84-85cd-1d6482b1c0e0 req-eec30d11-6259-4587-8591-f507e7f14c90 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Neutron deleted interface e4d88c42-18f3-404a-8d4a-68852d25e55f; detaching it from the instance and deleting it from the info cache [ 1268.509013] env[69994]: DEBUG nova.network.neutron [req-8fa83717-93f0-4b84-85cd-1d6482b1c0e0 req-eec30d11-6259-4587-8591-f507e7f14c90 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.732290] env[69994]: DEBUG oslo_vmware.api [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926548, 'name': PowerOnVM_Task, 'duration_secs': 0.833774} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.732636] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1268.732860] env[69994]: INFO nova.compute.manager [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Took 8.56 seconds to spawn the instance on the hypervisor. 
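[editor's note] The CreateVM_Task / PowerOnVM_Task entries above are produced by oslo.vmware's task-polling loop, which keeps reading a task's progress until it reaches a terminal state and then logs "completed successfully" together with the measured duration. Below is a minimal, self-contained sketch of that polling pattern. It deliberately does not call the real oslo.vmware API: the names TaskInfo, fetch_task_info and TaskFailed are illustrative assumptions invented for this sketch, not oslo.vmware symbols; only the overall poll-print-sleep behaviour mirrors what the log shows.

```python
# Illustrative sketch only: reproduces the polling behaviour behind the
# "Task: {...} progress is N%." / "completed successfully." lines above.
# The real driver uses oslo.vmware's wait_for_task machinery; TaskInfo,
# fetch_task_info and TaskFailed below are made-up names for this example.
import time
from dataclasses import dataclass
from typing import Callable


@dataclass
class TaskInfo:
    task_id: str
    name: str
    state: str            # "running", "success" or "error"
    progress: int         # 0-100
    error: str | None = None


class TaskFailed(Exception):
    pass


def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5) -> TaskInfo:
    """Poll a task until it finishes, logging progress like the entries above."""
    while True:
        info = fetch_task_info()
        if info.state == "running":
            print(f"Task: {{'id': {info.task_id!r}, 'name': {info.name!r}}} "
                  f"progress is {info.progress}%.")
            time.sleep(poll_interval)
            continue
        if info.state == "success":
            print(f"Task: {{'id': {info.task_id!r}, 'name': {info.name!r}}} "
                  "completed successfully.")
            return info
        raise TaskFailed(info.error or "task ended in error state")
```

In the trace above, this kind of loop is what reports task-2926548 (PowerOnVM_Task) at 0% right after submission, at 88% on a later poll, and as completed with duration_secs 0.833774 once the VM is powered on.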
[ 1268.733324] env[69994]: DEBUG nova.compute.manager [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1268.734847] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa0ffa0-a523-4198-ab8e-9e26f3817bce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.772841] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52991e44-9df2-1d9c-2b70-aef2264fc141, 'name': SearchDatastore_Task, 'duration_secs': 0.016452} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.773266] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1268.774649] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1268.774649] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.774649] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1268.774649] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1268.774649] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc54cc1f-2e0f-4ff4-9c10-f60b33f35524 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.786277] env[69994]: DEBUG oslo_concurrency.lockutils [req-3e754261-1126-4ac6-bc27-49b1ba001376 req-be21b3e5-b0d0-44f2-8f75-10e055f1b840 service nova] Releasing lock 
"refresh_cache-328868f0-2fe9-4c04-a669-54b073c53b14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1268.788118] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1268.788236] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1268.789072] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39eaf64f-6eed-4f7c-8b0e-d1fdbb1a6a26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.794838] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1268.794838] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e7f9d4-24de-b260-9ab6-137962c395f7" [ 1268.794838] env[69994]: _type = "Task" [ 1268.794838] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.804841] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e7f9d4-24de-b260-9ab6-137962c395f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.904389] env[69994]: DEBUG nova.scheduler.client.report [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1268.968438] env[69994]: DEBUG nova.network.neutron [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1268.977874] env[69994]: DEBUG nova.network.neutron [-] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1269.015050] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c08f10c7-efdf-462e-b0bf-baa22c354f81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.027384] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0496c2-2507-44dd-b9da-ca51f4945135 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.060816] env[69994]: DEBUG nova.compute.manager [req-8fa83717-93f0-4b84-85cd-1d6482b1c0e0 req-eec30d11-6259-4587-8591-f507e7f14c90 service nova] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Detach interface failed, port_id=e4d88c42-18f3-404a-8d4a-68852d25e55f, reason: Instance 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1269.110280] env[69994]: DEBUG nova.network.neutron [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Updating instance_info_cache with network_info: [{"id": "f906f838-ecf7-49d9-9645-270f550c7083", "address": "fa:16:3e:53:a6:01", "network": {"id": "47871821-916c-4397-b330-8bdda7ccd6f6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1622921692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "659795e8bd77484fa20f48d704d113a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9297313e-7c50-4873-93d3-67284929163a", "external-id": "nsx-vlan-transportzone-620", "segmentation_id": 620, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf906f838-ec", "ovs_interfaceid": "f906f838-ecf7-49d9-9645-270f550c7083", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1269.257871] env[69994]: INFO nova.compute.manager [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Took 13.38 seconds to build instance. [ 1269.307202] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e7f9d4-24de-b260-9ab6-137962c395f7, 'name': SearchDatastore_Task, 'duration_secs': 0.010156} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.308058] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f08cc79a-3a86-4af2-b44a-d36a98c88296 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.313644] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1269.313644] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529d5286-db8d-810a-886d-d6f6ef66cdc8" [ 1269.313644] env[69994]: _type = "Task" [ 1269.313644] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.321790] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529d5286-db8d-810a-886d-d6f6ef66cdc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.409459] env[69994]: DEBUG oslo_concurrency.lockutils [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.251s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.409841] env[69994]: DEBUG nova.compute.manager [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1269.431790] env[69994]: DEBUG nova.compute.manager [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Received event network-vif-plugged-f906f838-ecf7-49d9-9645-270f550c7083 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1269.431947] env[69994]: DEBUG oslo_concurrency.lockutils [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] Acquiring lock "ba823cb8-570b-465f-a566-524b82ebc1ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1269.432170] env[69994]: DEBUG oslo_concurrency.lockutils [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] Lock "ba823cb8-570b-465f-a566-524b82ebc1ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.432334] env[69994]: DEBUG oslo_concurrency.lockutils [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] Lock "ba823cb8-570b-465f-a566-524b82ebc1ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.432499] env[69994]: DEBUG nova.compute.manager [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] No waiting events found dispatching network-vif-plugged-f906f838-ecf7-49d9-9645-270f550c7083 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1269.432675] env[69994]: WARNING nova.compute.manager [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Received unexpected event network-vif-plugged-f906f838-ecf7-49d9-9645-270f550c7083 for instance with vm_state building and task_state spawning. [ 1269.432828] env[69994]: DEBUG nova.compute.manager [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Received event network-changed-f906f838-ecf7-49d9-9645-270f550c7083 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1269.432979] env[69994]: DEBUG nova.compute.manager [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Refreshing instance network info cache due to event network-changed-f906f838-ecf7-49d9-9645-270f550c7083. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1269.433158] env[69994]: DEBUG oslo_concurrency.lockutils [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] Acquiring lock "refresh_cache-ba823cb8-570b-465f-a566-524b82ebc1ba" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.481078] env[69994]: INFO nova.compute.manager [-] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Took 1.69 seconds to deallocate network for instance. [ 1269.612530] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Releasing lock "refresh_cache-ba823cb8-570b-465f-a566-524b82ebc1ba" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1269.612858] env[69994]: DEBUG nova.compute.manager [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Instance network_info: |[{"id": "f906f838-ecf7-49d9-9645-270f550c7083", "address": "fa:16:3e:53:a6:01", "network": {"id": "47871821-916c-4397-b330-8bdda7ccd6f6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1622921692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "659795e8bd77484fa20f48d704d113a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9297313e-7c50-4873-93d3-67284929163a", "external-id": "nsx-vlan-transportzone-620", "segmentation_id": 620, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf906f838-ec", "ovs_interfaceid": "f906f838-ecf7-49d9-9645-270f550c7083", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1269.613182] env[69994]: DEBUG oslo_concurrency.lockutils [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] Acquired lock "refresh_cache-ba823cb8-570b-465f-a566-524b82ebc1ba" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1269.613366] env[69994]: DEBUG nova.network.neutron [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Refreshing network info cache for port f906f838-ecf7-49d9-9645-270f550c7083 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1269.614591] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:a6:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'9297313e-7c50-4873-93d3-67284929163a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f906f838-ecf7-49d9-9645-270f550c7083', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1269.622344] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1269.623242] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1269.623474] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-760eb175-0b5d-4d4f-934a-1916c125e414 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.645289] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1269.645289] env[69994]: value = "task-2926549" [ 1269.645289] env[69994]: _type = "Task" [ 1269.645289] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.653597] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926549, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.759716] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c633970-cd0a-45b5-baca-29d29cd30151 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.889s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.825425] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529d5286-db8d-810a-886d-d6f6ef66cdc8, 'name': SearchDatastore_Task, 'duration_secs': 0.010385} completed successfully. 
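Each "Waiting for the task: (returnval){ ... }" block followed by "progress is N%" lines is a poll loop on a vCenter task object until it reports success or error. The sketch below illustrates that loop in generic form; it is not oslo.vmware's wait_for_task, and get_task_info, interval and the fake task used in the demo are assumptions for illustration.

    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300):
        """Poll a vCenter-style task until it finishes.

        get_task_info is any callable returning an object with .state
        ('running', 'success' or 'error'), .progress and .result.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise RuntimeError(f"task failed: {info.result}")
            print(f"progress is {info.progress}%")  # mirrors the DEBUG lines above
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")

    # Demo with a fake task that succeeds on the third poll.
    class _FakeTask:
        def __init__(self):
            self.calls = 0
        def __call__(self):
            self.calls += 1
            state = "success" if self.calls >= 3 else "running"
            return type("Info", (), {"state": state,
                                     "progress": self.calls * 33,
                                     "result": "vm-123"})()

    print(wait_for_task(_FakeTask(), interval=0.01))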
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.825723] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1269.826016] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 328868f0-2fe9-4c04-a669-54b073c53b14/328868f0-2fe9-4c04-a669-54b073c53b14.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1269.826313] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-638b3a87-fa91-4038-8870-1d76e5abe5ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.834235] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1269.834235] env[69994]: value = "task-2926550" [ 1269.834235] env[69994]: _type = "Task" [ 1269.834235] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.844585] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926550, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.915195] env[69994]: DEBUG nova.compute.utils [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1269.916373] env[69994]: DEBUG nova.compute.manager [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1269.916548] env[69994]: DEBUG nova.network.neutron [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1269.986411] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1269.986520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.986687] env[69994]: DEBUG nova.objects.instance [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lazy-loading 'resources' on Instance uuid 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1269.989138] env[69994]: DEBUG nova.policy [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6bcafd04d09f45fab9d573d11d01dfbf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c324e22a0046460b9ad3ad8578f7ef6f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1270.157788] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926549, 'name': CreateVM_Task, 'duration_secs': 0.314923} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.157975] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1270.158821] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.159018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.159349] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1270.159688] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df4fda94-4994-458d-8fc3-6b5cd04ac8a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.165830] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1270.165830] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a2567-91c1-761e-1be7-e9a15dd30202" [ 1270.165830] env[69994]: _type = "Task" [ 1270.165830] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.176107] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a2567-91c1-761e-1be7-e9a15dd30202, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.344453] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926550, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459756} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.344829] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 328868f0-2fe9-4c04-a669-54b073c53b14/328868f0-2fe9-4c04-a669-54b073c53b14.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1270.344921] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1270.345200] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9838d36-8383-41e5-802d-a8a6edddf920 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.354835] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1270.354835] env[69994]: value = "task-2926551" [ 1270.354835] env[69994]: _type = "Task" [ 1270.354835] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.363693] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926551, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.420790] env[69994]: DEBUG nova.compute.manager [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Start building block device mappings for instance. 
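Tasks task-2926550 and task-2926551 show the usual boot-from-image path on this driver: the cached image VMDK is copied from devstack-image-cache_base into the instance directory, then the root disk is extended to the requested size, which appears to be logged in KiB (1048576 KiB = 1 GiB). A rough sketch of that ordering follows; copy_disk and extend_disk are placeholders standing in for the CopyVirtualDisk_Task and ExtendVirtualDisk_Task wrappers, and the path layout simply mirrors what the log shows.

    def prepare_root_disk(copy_disk, extend_disk, datastore, image_id,
                          instance_uuid, root_gb):
        """Copy the cached image to the instance folder, then grow it."""
        cache_path = (f"[{datastore}] devstack-image-cache_base/"
                      f"{image_id}/{image_id}.vmdk")
        inst_path = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
        copy_disk(cache_path, inst_path)
        requested_kib = root_gb * 1024 * 1024  # 1 GiB root -> 1048576, as logged
        extend_disk(inst_path, requested_kib)
        return inst_path

    # Usage with no-op stand-ins, reproducing the paths seen above.
    print(prepare_root_disk(lambda *a: None, lambda *a: None, "datastore2",
                            "f75f967d-5bd8-4c15-9a52-96f7e9dd9d48",
                            "328868f0-2fe9-4c04-a669-54b073c53b14", 1))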
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1270.631383] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac17c0b-5991-4671-8c92-ecc98181b100 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.640707] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e275e90-a1e1-449d-be4f-0e4e06ea3418 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.676045] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ef3cd6-b0e3-40be-9c43-256fd0959e61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.684396] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523a2567-91c1-761e-1be7-e9a15dd30202, 'name': SearchDatastore_Task, 'duration_secs': 0.059497} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.686463] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1270.686702] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1270.686935] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.687091] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.687270] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1270.687580] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69d1d3ca-ec42-4c2e-b133-946e69034e70 
{{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.690269] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebde990-ab58-46cc-b700-26c8db54c5f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.704360] env[69994]: DEBUG nova.compute.provider_tree [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1270.710266] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1270.710266] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1270.710266] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a5fec8c-9bc9-4a91-b5b1-d597e961c3c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.716803] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1270.716803] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f8181c-2924-f587-b0cb-f2dc6b15f5dd" [ 1270.716803] env[69994]: _type = "Task" [ 1270.716803] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.724638] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f8181c-2924-f587-b0cb-f2dc6b15f5dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.867377] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926551, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063548} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.867654] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1270.868672] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fe51c6-042c-482f-bc56-0c512e241d1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.891739] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 328868f0-2fe9-4c04-a669-54b073c53b14/328868f0-2fe9-4c04-a669-54b073c53b14.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1270.892742] env[69994]: DEBUG nova.network.neutron [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Updated VIF entry in instance network info cache for port f906f838-ecf7-49d9-9645-270f550c7083. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1270.893102] env[69994]: DEBUG nova.network.neutron [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Updating instance_info_cache with network_info: [{"id": "f906f838-ecf7-49d9-9645-270f550c7083", "address": "fa:16:3e:53:a6:01", "network": {"id": "47871821-916c-4397-b330-8bdda7ccd6f6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1622921692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "659795e8bd77484fa20f48d704d113a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9297313e-7c50-4873-93d3-67284929163a", "external-id": "nsx-vlan-transportzone-620", "segmentation_id": 620, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf906f838-ec", "ovs_interfaceid": "f906f838-ecf7-49d9-9645-270f550c7083", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.894246] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32bea18d-33e2-45cf-8aec-81ceb83937a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.916176] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 
tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1270.916176] env[69994]: value = "task-2926552" [ 1270.916176] env[69994]: _type = "Task" [ 1270.916176] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.925524] env[69994]: INFO nova.virt.block_device [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Booting with volume de4ada45-4c6a-4478-87c3-f23eb2bca97b at /dev/sda [ 1270.927022] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926552, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.992926] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7707b35-8649-4b2a-add8-1c0c476378e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.003626] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee1781d-beff-4f19-9163-140c344e9407 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.027178] env[69994]: DEBUG nova.network.neutron [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Successfully created port: a983eff5-af52-4477-9645-db9812917bc7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1271.040267] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-26207d0a-155e-400f-ac66-72959ec36d33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.054014] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05060ed-f4af-41da-b757-5e2c2c928015 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.097372] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf456c53-6a46-4a4e-907a-bea078d63e1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.105629] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce71e5e-4945-414c-8ddb-48533945f97a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.130018] env[69994]: DEBUG nova.virt.block_device [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating existing volume attachment record: 97623a34-ed86-4f09-a185-2f2de569e69b {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1271.176733] env[69994]: DEBUG nova.compute.manager [req-cf0b5145-6aae-4b16-80cb-7abc9e7daa61 
req-0bd26457-0bb2-4b63-9919-73ba2017feab service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Received event network-changed-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1271.176815] env[69994]: DEBUG nova.compute.manager [req-cf0b5145-6aae-4b16-80cb-7abc9e7daa61 req-0bd26457-0bb2-4b63-9919-73ba2017feab service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Refreshing instance network info cache due to event network-changed-090b588e-3c97-4b85-b16b-0a1f4c7e4b18. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1271.177453] env[69994]: DEBUG oslo_concurrency.lockutils [req-cf0b5145-6aae-4b16-80cb-7abc9e7daa61 req-0bd26457-0bb2-4b63-9919-73ba2017feab service nova] Acquiring lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.177453] env[69994]: DEBUG oslo_concurrency.lockutils [req-cf0b5145-6aae-4b16-80cb-7abc9e7daa61 req-0bd26457-0bb2-4b63-9919-73ba2017feab service nova] Acquired lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.177453] env[69994]: DEBUG nova.network.neutron [req-cf0b5145-6aae-4b16-80cb-7abc9e7daa61 req-0bd26457-0bb2-4b63-9919-73ba2017feab service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Refreshing network info cache for port 090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1271.212801] env[69994]: DEBUG nova.scheduler.client.report [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1271.231899] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f8181c-2924-f587-b0cb-f2dc6b15f5dd, 'name': SearchDatastore_Task, 'duration_secs': 0.009345} completed successfully. 
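The inventory dict reported for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be determines how much Placement can hand out per resource class, roughly (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick check of the logged values (the formula is the standard Placement capacity calculation; everything else comes straight from the record above):

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 158},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: capacity {capacity:.0f}, at most {inv['max_unit']} per allocation")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400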
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.233443] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-495dacbe-a79b-4a8f-83a6-2579c1bf4db2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.240960] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1271.240960] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5233e8dc-c7a8-3f6a-9799-b3e91b94bb0a" [ 1271.240960] env[69994]: _type = "Task" [ 1271.240960] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.251084] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5233e8dc-c7a8-3f6a-9799-b3e91b94bb0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.409935] env[69994]: DEBUG oslo_concurrency.lockutils [req-1817bfd0-add0-430d-af56-81fdd88c3392 req-0a9988c0-1aff-45b6-8996-f079573e4e8d service nova] Releasing lock "refresh_cache-ba823cb8-570b-465f-a566-524b82ebc1ba" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1271.427458] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926552, 'name': ReconfigVM_Task, 'duration_secs': 0.311575} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.427821] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 328868f0-2fe9-4c04-a669-54b073c53b14/328868f0-2fe9-4c04-a669-54b073c53b14.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1271.428592] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2cb48cde-daf6-4584-93c8-7ba569806acc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.434956] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1271.434956] env[69994]: value = "task-2926553" [ 1271.434956] env[69994]: _type = "Task" [ 1271.434956] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.444130] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926553, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.718668] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.732s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.743803] env[69994]: INFO nova.scheduler.client.report [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Deleted allocations for instance 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e [ 1271.760443] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5233e8dc-c7a8-3f6a-9799-b3e91b94bb0a, 'name': SearchDatastore_Task, 'duration_secs': 0.018667} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.762989] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1271.763329] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] ba823cb8-570b-465f-a566-524b82ebc1ba/ba823cb8-570b-465f-a566-524b82ebc1ba.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1271.763931] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09edd88e-48ec-4b65-9c9e-632835241d25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.772074] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1271.772074] env[69994]: value = "task-2926554" [ 1271.772074] env[69994]: _type = "Task" [ 1271.772074] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.782227] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926554, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.926111] env[69994]: DEBUG nova.network.neutron [req-cf0b5145-6aae-4b16-80cb-7abc9e7daa61 req-0bd26457-0bb2-4b63-9919-73ba2017feab service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Updated VIF entry in instance network info cache for port 090b588e-3c97-4b85-b16b-0a1f4c7e4b18. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1271.926517] env[69994]: DEBUG nova.network.neutron [req-cf0b5145-6aae-4b16-80cb-7abc9e7daa61 req-0bd26457-0bb2-4b63-9919-73ba2017feab service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Updating instance_info_cache with network_info: [{"id": "090b588e-3c97-4b85-b16b-0a1f4c7e4b18", "address": "fa:16:3e:bf:22:a2", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap090b588e-3c", "ovs_interfaceid": "090b588e-3c97-4b85-b16b-0a1f4c7e4b18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.947345] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926553, 'name': Rename_Task, 'duration_secs': 0.150481} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.947640] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1271.948226] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-752b1eb1-4e44-4af3-b8ac-ac42ae913766 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.957379] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1271.957379] env[69994]: value = "task-2926555" [ 1271.957379] env[69994]: _type = "Task" [ 1271.957379] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.967715] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926555, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.259141] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9fc9807d-6378-4440-b271-9d798e01264b tempest-AttachVolumeTestJSON-2064976721 tempest-AttachVolumeTestJSON-2064976721-project-member] Lock "1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.196s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.285963] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926554, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481435} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.286274] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] ba823cb8-570b-465f-a566-524b82ebc1ba/ba823cb8-570b-465f-a566-524b82ebc1ba.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1272.286490] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1272.286751] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0868820e-5fbc-4874-9f1c-1f66cbe9e40c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.294586] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1272.294586] env[69994]: value = "task-2926556" [ 1272.294586] env[69994]: _type = "Task" [ 1272.294586] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.305347] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926556, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.429218] env[69994]: DEBUG oslo_concurrency.lockutils [req-cf0b5145-6aae-4b16-80cb-7abc9e7daa61 req-0bd26457-0bb2-4b63-9919-73ba2017feab service nova] Releasing lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1272.467907] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926555, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.613918] env[69994]: DEBUG nova.network.neutron [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Successfully updated port: a983eff5-af52-4477-9645-db9812917bc7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1272.808058] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926556, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072098} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.808410] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1272.809337] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7173459e-30cf-4e7f-abe9-3cb16c88b193 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.841620] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] ba823cb8-570b-465f-a566-524b82ebc1ba/ba823cb8-570b-465f-a566-524b82ebc1ba.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1272.842197] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d086bf8-6596-414b-9b43-45726ed57768 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.874989] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1272.874989] env[69994]: value = "task-2926558" [ 1272.874989] env[69994]: _type = "Task" [ 1272.874989] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.886748] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926558, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.968198] env[69994]: DEBUG oslo_vmware.api [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926555, 'name': PowerOnVM_Task, 'duration_secs': 0.894563} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.968406] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1272.968614] env[69994]: INFO nova.compute.manager [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Took 7.78 seconds to spawn the instance on the hypervisor. 
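The five vCenter tasks visible here for instance 328868f0-2fe9-4c04-a669-54b073c53b14 (copy 0.46 s, extend 0.06 s, reconfigure 0.31 s, rename 0.15 s, power-on 0.89 s) add up to roughly 1.9 s of the reported 7.78 s spawn time; the remainder is image-cache checks, Neutron allocation and polling overhead. When skimming logs like this, a small parser for the "completed successfully" records is handy; the snippet below is a throwaway helper written for this log format, not part of Nova.

    import re

    TASK_RE = re.compile(r"'name': (?P<name>\w+), 'duration_secs': (?P<secs>[\d.]+)}"
                         r" completed successfully")

    def task_durations(log_text):
        """Return [(task_name, seconds), ...] for every completed vCenter task."""
        return [(m["name"], float(m["secs"])) for m in TASK_RE.finditer(log_text)]

    sample = ("Task: {'id': task-2926550, 'name': CopyVirtualDisk_Task, "
              "'duration_secs': 0.459756} completed successfully.")
    print(task_durations(sample))  # [('CopyVirtualDisk_Task', 0.459756)]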
[ 1272.968797] env[69994]: DEBUG nova.compute.manager [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1272.969598] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc090e4-b2cc-47ce-bbb1-3f900c093d4b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.117889] env[69994]: DEBUG oslo_concurrency.lockutils [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.118069] env[69994]: DEBUG oslo_concurrency.lockutils [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1273.118305] env[69994]: DEBUG nova.network.neutron [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1273.204693] env[69994]: DEBUG nova.compute.manager [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Received event network-vif-plugged-a983eff5-af52-4477-9645-db9812917bc7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1273.204915] env[69994]: DEBUG oslo_concurrency.lockutils [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] Acquiring lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.205176] env[69994]: DEBUG oslo_concurrency.lockutils [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] Lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.205319] env[69994]: DEBUG oslo_concurrency.lockutils [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] Lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.205475] env[69994]: DEBUG nova.compute.manager [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] No waiting events found 
dispatching network-vif-plugged-a983eff5-af52-4477-9645-db9812917bc7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1273.205645] env[69994]: WARNING nova.compute.manager [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Received unexpected event network-vif-plugged-a983eff5-af52-4477-9645-db9812917bc7 for instance with vm_state building and task_state spawning. [ 1273.205809] env[69994]: DEBUG nova.compute.manager [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Received event network-changed-a983eff5-af52-4477-9645-db9812917bc7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1273.205963] env[69994]: DEBUG nova.compute.manager [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Refreshing instance network info cache due to event network-changed-a983eff5-af52-4477-9645-db9812917bc7. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1273.206161] env[69994]: DEBUG oslo_concurrency.lockutils [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] Acquiring lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.222410] env[69994]: DEBUG nova.compute.manager [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1273.222961] env[69994]: DEBUG nova.virt.hardware [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1273.223190] env[69994]: DEBUG nova.virt.hardware [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1273.223345] env[69994]: DEBUG nova.virt.hardware [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1273.223523] env[69994]: DEBUG nova.virt.hardware [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1273.223722] env[69994]: DEBUG nova.virt.hardware [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1273.223910] env[69994]: DEBUG nova.virt.hardware [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1273.224131] env[69994]: DEBUG nova.virt.hardware [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1273.224297] env[69994]: DEBUG nova.virt.hardware [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1273.224461] env[69994]: DEBUG nova.virt.hardware [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Got 1 possible topologies 
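The nova.virt.hardware lines above walk the CPU topology search for the m1.nano flavor: vcpus=1 and no explicit flavor or image limits, so sockets, cores and threads all default to the 65536 maximum, and only one candidate topology (1 socket, 1 core, 1 thread) survives. The simplified enumeration below reproduces that result by keeping only exact-fit products; it is an approximation for illustration, not Nova's actual _get_possible_cpu_topologies.

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples that exactly fit `vcpus`."""
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -> "Got 1 possible topologies"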
{{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1273.224624] env[69994]: DEBUG nova.virt.hardware [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1273.224798] env[69994]: DEBUG nova.virt.hardware [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1273.226254] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c482e728-2f51-4230-b833-3927a5f49110 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.234911] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dade8fa4-10b8-4290-8a48-69af2f410966 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.388704] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926558, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.489122] env[69994]: INFO nova.compute.manager [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Took 12.58 seconds to build instance. [ 1273.651314] env[69994]: DEBUG nova.network.neutron [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1273.785501] env[69994]: DEBUG nova.network.neutron [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance_info_cache with network_info: [{"id": "a983eff5-af52-4477-9645-db9812917bc7", "address": "fa:16:3e:46:64:84", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa983eff5-af", "ovs_interfaceid": "a983eff5-af52-4477-9645-db9812917bc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1273.887805] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926558, 'name': ReconfigVM_Task, 'duration_secs': 0.829174} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.888130] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Reconfigured VM instance instance-00000079 to attach disk [datastore2] ba823cb8-570b-465f-a566-524b82ebc1ba/ba823cb8-570b-465f-a566-524b82ebc1ba.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1273.888793] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6c6d3b9-417d-4e8e-b760-0eddbcbc6ed5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.896049] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1273.896049] env[69994]: value = "task-2926559" [ 1273.896049] env[69994]: _type = "Task" [ 1273.896049] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.904318] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926559, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.990730] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ec839ee3-3ff7-4224-975d-3c74d21a16a4 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "328868f0-2fe9-4c04-a669-54b073c53b14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.090s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.288544] env[69994]: DEBUG oslo_concurrency.lockutils [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1274.288872] env[69994]: DEBUG nova.compute.manager [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Instance network_info: |[{"id": "a983eff5-af52-4477-9645-db9812917bc7", "address": "fa:16:3e:46:64:84", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa983eff5-af", "ovs_interfaceid": "a983eff5-af52-4477-9645-db9812917bc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1274.289236] env[69994]: DEBUG oslo_concurrency.lockutils [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] Acquired lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1274.289471] env[69994]: DEBUG nova.network.neutron [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Refreshing network info cache for port a983eff5-af52-4477-9645-db9812917bc7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 
1274.290706] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:64:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a983eff5-af52-4477-9645-db9812917bc7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1274.298276] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1274.299271] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1274.299591] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94a2ac3d-73e9-49dc-9ecc-870316bac244 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.320628] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1274.320628] env[69994]: value = "task-2926560" [ 1274.320628] env[69994]: _type = "Task" [ 1274.320628] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.330486] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926560, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.406715] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926559, 'name': Rename_Task, 'duration_secs': 0.153224} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.407034] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1274.407304] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a669ef59-367b-4ed7-845c-dbd953b1f1f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.414667] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1274.414667] env[69994]: value = "task-2926561" [ 1274.414667] env[69994]: _type = "Task" [ 1274.414667] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.423679] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926561, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.836196] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926560, 'name': CreateVM_Task, 'duration_secs': 0.317589} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.836625] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1274.837084] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587667', 'volume_id': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'name': 'volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ac72ed6c-15f7-47e3-83a0-abcd85bba128', 'attached_at': '', 'detached_at': '', 'volume_id': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'serial': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b'}, 'attachment_id': '97623a34-ed86-4f09-a185-2f2de569e69b', 'device_type': None, 'mount_device': '/dev/sda', 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69994) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1274.837291] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Root volume attach. 
Driver type: vmdk {{(pid=69994) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1274.838357] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94067be0-022f-47a4-b87c-dda4dc975bb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.847088] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d94d6cd-cb26-4090-997f-89735a0f8690 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.854158] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac44fdb5-3b16-41eb-84f4-d7465ffc58a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.860719] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-247c7800-6d0e-4ee9-aa3e-7fe44ba94070 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.869081] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1274.869081] env[69994]: value = "task-2926562" [ 1274.869081] env[69994]: _type = "Task" [ 1274.869081] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.880050] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926562, 'name': RelocateVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.924287] env[69994]: DEBUG oslo_vmware.api [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926561, 'name': PowerOnVM_Task, 'duration_secs': 0.462729} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.924570] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1274.924824] env[69994]: INFO nova.compute.manager [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Took 7.51 seconds to spawn the instance on the hypervisor. 
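The repeated "Task: {'id': task-..., 'name': ...} progress is N%" DEBUG entries above come from oslo.vmware polling a vCenter task (here CreateVM_Task and RelocateVM_Task for instance ac72ed6c, and PowerOnVM_Task for ba823cb8) until vCenter reports success. The following is a minimal sketch of that polling pattern only; it is not the oslo.vmware implementation, and the get_task_info helper plus the "running"/"success"/"error" states are assumptions made purely for illustration.

    import time

    class TaskFailed(Exception):
        """Raised when the vCenter task finishes in an error state."""

    def wait_for_task(session, task_ref, poll_interval=0.5):
        # Poll until the task leaves the running state, mirroring the
        # "_poll_task ... progress is N%" DEBUG lines seen in this log.
        while True:
            info = session.get_task_info(task_ref)  # assumed helper on the session object
            if info.state == "running":
                print(f"Task: {task_ref} progress is {info.progress}%.")
            elif info.state == "success":
                print(f"Task: {task_ref} completed successfully.")
                return info.result
            else:
                raise TaskFailed(info.error)
            time.sleep(poll_interval)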
[ 1274.925019] env[69994]: DEBUG nova.compute.manager [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1274.925811] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49974f6e-860e-4b6e-aa0b-c90ec5de3ef0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.011202] env[69994]: DEBUG nova.network.neutron [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updated VIF entry in instance network info cache for port a983eff5-af52-4477-9645-db9812917bc7. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1275.011767] env[69994]: DEBUG nova.network.neutron [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance_info_cache with network_info: [{"id": "a983eff5-af52-4477-9645-db9812917bc7", "address": "fa:16:3e:46:64:84", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa983eff5-af", "ovs_interfaceid": "a983eff5-af52-4477-9645-db9812917bc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.231416] env[69994]: DEBUG nova.compute.manager [req-38a64b5e-2a4e-4048-ba46-86a798e9cf8f req-f1f53390-3e76-4ed1-b0b4-833fe531de65 service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Received event network-changed-e3759990-8ec1-401b-9393-767859b0a13f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1275.231610] env[69994]: DEBUG nova.compute.manager [req-38a64b5e-2a4e-4048-ba46-86a798e9cf8f req-f1f53390-3e76-4ed1-b0b4-833fe531de65 service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Refreshing instance network info cache due to event network-changed-e3759990-8ec1-401b-9393-767859b0a13f. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1275.232209] env[69994]: DEBUG oslo_concurrency.lockutils [req-38a64b5e-2a4e-4048-ba46-86a798e9cf8f req-f1f53390-3e76-4ed1-b0b4-833fe531de65 service nova] Acquiring lock "refresh_cache-328868f0-2fe9-4c04-a669-54b073c53b14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.232368] env[69994]: DEBUG oslo_concurrency.lockutils [req-38a64b5e-2a4e-4048-ba46-86a798e9cf8f req-f1f53390-3e76-4ed1-b0b4-833fe531de65 service nova] Acquired lock "refresh_cache-328868f0-2fe9-4c04-a669-54b073c53b14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1275.232534] env[69994]: DEBUG nova.network.neutron [req-38a64b5e-2a4e-4048-ba46-86a798e9cf8f req-f1f53390-3e76-4ed1-b0b4-833fe531de65 service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Refreshing network info cache for port e3759990-8ec1-401b-9393-767859b0a13f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1275.380695] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926562, 'name': RelocateVM_Task, 'duration_secs': 0.39084} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.380950] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1275.381244] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587667', 'volume_id': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'name': 'volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ac72ed6c-15f7-47e3-83a0-abcd85bba128', 'attached_at': '', 'detached_at': '', 'volume_id': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'serial': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1275.382132] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e096efa-c9c5-4f01-a5bf-dbbd3e4c3b85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.398513] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db021752-fe02-42c9-a478-023a844bab9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.420735] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b/volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1275.421095] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65b7d383-9320-4c3c-949e-d2a3f860f09f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.445151] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1275.445151] env[69994]: value = "task-2926563" [ 1275.445151] env[69994]: _type = "Task" [ 1275.445151] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.445657] env[69994]: INFO nova.compute.manager [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Took 12.79 seconds to build instance. [ 1275.457071] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926563, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.516601] env[69994]: DEBUG oslo_concurrency.lockutils [req-bb1bb0e6-7d59-4931-b98c-8cd6e4fbd123 req-471d2f70-56f7-4afe-b3c1-23b6d6bdb550 service nova] Releasing lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.948270] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b322c6a-8033-44bc-8b71-9e5d0af4ca19 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "ba823cb8-570b-465f-a566-524b82ebc1ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.298s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.958092] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926563, 'name': ReconfigVM_Task, 'duration_secs': 0.428411} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.958388] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Reconfigured VM instance instance-0000007a to attach disk [datastore2] volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b/volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1275.963831] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e81277a-fb8c-49e1-b0c1-1ac1e7dce7e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.975129] env[69994]: DEBUG nova.network.neutron [req-38a64b5e-2a4e-4048-ba46-86a798e9cf8f req-f1f53390-3e76-4ed1-b0b4-833fe531de65 service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Updated VIF entry in instance network info cache for port e3759990-8ec1-401b-9393-767859b0a13f. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1275.975482] env[69994]: DEBUG nova.network.neutron [req-38a64b5e-2a4e-4048-ba46-86a798e9cf8f req-f1f53390-3e76-4ed1-b0b4-833fe531de65 service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Updating instance_info_cache with network_info: [{"id": "e3759990-8ec1-401b-9393-767859b0a13f", "address": "fa:16:3e:94:56:71", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3759990-8e", "ovs_interfaceid": "e3759990-8ec1-401b-9393-767859b0a13f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.982395] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1275.982395] env[69994]: value = "task-2926565" [ 1275.982395] env[69994]: _type = "Task" [ 1275.982395] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.990932] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926565, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.129418] env[69994]: INFO nova.compute.manager [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Rescuing [ 1276.129806] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "refresh_cache-ba823cb8-570b-465f-a566-524b82ebc1ba" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.129972] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquired lock "refresh_cache-ba823cb8-570b-465f-a566-524b82ebc1ba" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1276.130163] env[69994]: DEBUG nova.network.neutron [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1276.479762] env[69994]: DEBUG oslo_concurrency.lockutils [req-38a64b5e-2a4e-4048-ba46-86a798e9cf8f req-f1f53390-3e76-4ed1-b0b4-833fe531de65 service nova] Releasing lock "refresh_cache-328868f0-2fe9-4c04-a669-54b073c53b14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1276.492960] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926565, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.524566] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.524780] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.524937] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.525105] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.525265] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.525406] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.525548] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.525683] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1276.525828] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.843860] env[69994]: DEBUG nova.network.neutron [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Updating instance_info_cache with network_info: [{"id": "f906f838-ecf7-49d9-9645-270f550c7083", "address": "fa:16:3e:53:a6:01", "network": {"id": "47871821-916c-4397-b330-8bdda7ccd6f6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1622921692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "659795e8bd77484fa20f48d704d113a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9297313e-7c50-4873-93d3-67284929163a", "external-id": "nsx-vlan-transportzone-620", "segmentation_id": 620, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf906f838-ec", "ovs_interfaceid": "f906f838-ecf7-49d9-9645-270f550c7083", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.993858] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926565, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.029016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1277.029289] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1277.029459] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1277.029636] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1277.030519] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc68fc0-8289-49b0-9bfc-32a15326b8f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.038847] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1bdd48e-120f-4bc0-9520-f0389985ea38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.053736] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa99bf9-ff56-4d6b-ba44-d3ce7b283e46 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.060537] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-596a46ab-4588-4448-a403-5793f209a00d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.089792] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179598MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1277.089946] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1277.090175] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1277.346938] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Releasing lock "refresh_cache-ba823cb8-570b-465f-a566-524b82ebc1ba" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1277.495749] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926565, 'name': ReconfigVM_Task, 'duration_secs': 1.270162} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.496093] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587667', 'volume_id': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'name': 'volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ac72ed6c-15f7-47e3-83a0-abcd85bba128', 'attached_at': '', 'detached_at': '', 'volume_id': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'serial': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1277.496629] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd715f1d-f7b8-40f0-bc86-d5e2c8cd0a81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.503734] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1277.503734] env[69994]: value = "task-2926566" [ 1277.503734] env[69994]: _type = "Task" [ 1277.503734] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.512257] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926566, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.014593] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926566, 'name': Rename_Task, 'duration_secs': 0.146671} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.014973] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1278.015152] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a3b2f08-8de9-4148-93f8-8a7b368fa56e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.022253] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1278.022253] env[69994]: value = "task-2926567" [ 1278.022253] env[69994]: _type = "Task" [ 1278.022253] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.030720] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926567, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.128242] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 8001cb13-6a52-451b-b4b6-57b893975079 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1278.128517] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 29ea539a-d8f4-487b-b5e7-1f15534272f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1278.128759] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 5784a102-fd07-4717-a88b-ac94ad578af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1278.128940] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 966e3672-f85b-467d-8821-1e14533ee629 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1278.129134] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 94169894-f772-41c9-95a1-ddf622f2c9f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1278.129312] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance ead967bc-ba1d-4c3c-8dbb-e284b444ffcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1278.129483] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 328868f0-2fe9-4c04-a669-54b073c53b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1278.129653] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance ba823cb8-570b-465f-a566-524b82ebc1ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1278.129825] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance ac72ed6c-15f7-47e3-83a0-abcd85bba128 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1278.130130] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1278.130333] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1278.263475] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f9c4ae-83bb-4e0c-a599-40f74cf020a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.273953] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72897259-13ce-4c79-a311-3f31b84c3199 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.309740] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3247b0-9ef5-46c7-a7d3-27d2880a463e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.318429] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5729329f-7d69-4c30-9559-5609c556e1d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.334348] env[69994]: DEBUG nova.compute.provider_tree [None 
req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.532479] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926567, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.837629] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1278.883207] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1278.883503] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-172bfec2-3cb0-4bff-923e-fcbb9887bec8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.891499] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1278.891499] env[69994]: value = "task-2926568" [ 1278.891499] env[69994]: _type = "Task" [ 1278.891499] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.900918] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926568, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.033353] env[69994]: DEBUG oslo_vmware.api [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926567, 'name': PowerOnVM_Task, 'duration_secs': 0.693302} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.033781] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1279.034000] env[69994]: INFO nova.compute.manager [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Took 5.81 seconds to spawn the instance on the hypervisor. [ 1279.034245] env[69994]: DEBUG nova.compute.manager [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1279.035145] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1020356-cf6c-45e5-8794-084c22537823 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.342859] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1279.343061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.253s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1279.401564] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926568, 'name': PowerOffVM_Task, 'duration_secs': 0.181263} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.401827] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1279.402577] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532be401-dff2-4de2-8cf4-9a637d745abf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.420357] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1166e29a-2b29-46de-a1ef-de0a0ff9a198 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.452816] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1279.453115] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f93a9be4-a31a-4af4-a31a-6a6a2419017e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.460418] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1279.460418] env[69994]: value = "task-2926570" [ 1279.460418] env[69994]: _type = "Task" [ 1279.460418] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.470371] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926570, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.553504] env[69994]: INFO nova.compute.manager [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Took 12.69 seconds to build instance. 
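The resource-tracker audit above can be cross-checked by hand: the tracker lists nine instances, each holding a 192 MB / 1 vCPU allocation, of which eight also hold a 1 GB DISK_GB allocation (ac72ed6c boots from a volume and has none), and the reported inventory reserves 512 MB of memory. A small arithmetic check, assuming used_ram in the "Final resource view" line includes that reserved memory:

    all_instances = 9              # instances listed by the resource tracker above
    instances_with_root_disk = 8   # ac72ed6c is volume-backed, so no DISK_GB allocation
    reserved_ram_mb = 512          # MEMORY_MB 'reserved' from the reported inventory

    used_ram_mb = all_instances * 192 + reserved_ram_mb  # 1728 + 512 = 2240
    used_disk_gb = instances_with_root_disk * 1          # 8
    used_vcpus = all_instances * 1                       # 9

    # 2240 8 9 -- matching used_ram=2240MB, used_disk=8GB, used_vcpus=9 in the log
    print(used_ram_mb, used_disk_gb, used_vcpus)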
[ 1279.975028] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1279.975028] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1279.975028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.975028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1279.975028] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1279.975028] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45c8c30f-0c97-44e9-9fb4-92ffcf05b2ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.987490] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1279.987977] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1279.988873] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f508b9e0-3b6a-46dc-a3a3-2d6074adaddf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.997018] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1279.997018] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cbf348-8f48-f363-eb32-6dd30325ad6f" [ 1279.997018] env[69994]: _type = "Task" [ 1279.997018] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.005126] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cbf348-8f48-f363-eb32-6dd30325ad6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.056202] env[69994]: DEBUG oslo_concurrency.lockutils [None req-40e38748-caff-493e-83cf-d3d0327c09fb tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.203s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.218693] env[69994]: DEBUG nova.compute.manager [req-701a3fb4-bba3-495e-9621-454db6bff5b7 req-2a51d392-54fd-4676-b66d-22c35c222d61 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Received event network-changed-68208872-218b-45a2-b062-bedcf2b0803e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1280.218909] env[69994]: DEBUG nova.compute.manager [req-701a3fb4-bba3-495e-9621-454db6bff5b7 req-2a51d392-54fd-4676-b66d-22c35c222d61 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Refreshing instance network info cache due to event network-changed-68208872-218b-45a2-b062-bedcf2b0803e. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1280.219215] env[69994]: DEBUG oslo_concurrency.lockutils [req-701a3fb4-bba3-495e-9621-454db6bff5b7 req-2a51d392-54fd-4676-b66d-22c35c222d61 service nova] Acquiring lock "refresh_cache-8001cb13-6a52-451b-b4b6-57b893975079" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.219306] env[69994]: DEBUG oslo_concurrency.lockutils [req-701a3fb4-bba3-495e-9621-454db6bff5b7 req-2a51d392-54fd-4676-b66d-22c35c222d61 service nova] Acquired lock "refresh_cache-8001cb13-6a52-451b-b4b6-57b893975079" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1280.219490] env[69994]: DEBUG nova.network.neutron [req-701a3fb4-bba3-495e-9621-454db6bff5b7 req-2a51d392-54fd-4676-b66d-22c35c222d61 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Refreshing network info cache for port 68208872-218b-45a2-b062-bedcf2b0803e {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1280.506319] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52cbf348-8f48-f363-eb32-6dd30325ad6f, 'name': SearchDatastore_Task, 'duration_secs': 0.010515} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.507181] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bbe0b82-0b58-4670-be6e-9bac4e6ae5f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.513845] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1280.513845] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527052c5-1414-a5aa-b95a-c9fb9ef453a3" [ 1280.513845] env[69994]: _type = "Task" [ 1280.513845] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.523161] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527052c5-1414-a5aa-b95a-c9fb9ef453a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.919023] env[69994]: DEBUG nova.network.neutron [req-701a3fb4-bba3-495e-9621-454db6bff5b7 req-2a51d392-54fd-4676-b66d-22c35c222d61 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Updated VIF entry in instance network info cache for port 68208872-218b-45a2-b062-bedcf2b0803e. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1280.919403] env[69994]: DEBUG nova.network.neutron [req-701a3fb4-bba3-495e-9621-454db6bff5b7 req-2a51d392-54fd-4676-b66d-22c35c222d61 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Updating instance_info_cache with network_info: [{"id": "68208872-218b-45a2-b062-bedcf2b0803e", "address": "fa:16:3e:1f:9d:a7", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68208872-21", "ovs_interfaceid": "68208872-218b-45a2-b062-bedcf2b0803e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.025150] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]527052c5-1414-a5aa-b95a-c9fb9ef453a3, 'name': SearchDatastore_Task, 'duration_secs': 0.013162} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.025431] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1281.026053] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] ba823cb8-570b-465f-a566-524b82ebc1ba/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk. 
{{(pid=69994) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1281.026053] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f39d0c0a-64c8-431f-afc4-c085e66abe52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.033455] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1281.033455] env[69994]: value = "task-2926571" [ 1281.033455] env[69994]: _type = "Task" [ 1281.033455] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.041748] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926571, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.422185] env[69994]: DEBUG oslo_concurrency.lockutils [req-701a3fb4-bba3-495e-9621-454db6bff5b7 req-2a51d392-54fd-4676-b66d-22c35c222d61 service nova] Releasing lock "refresh_cache-8001cb13-6a52-451b-b4b6-57b893975079" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1281.545592] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926571, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.636942] env[69994]: DEBUG nova.compute.manager [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1282.049141] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926571, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532471} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.049141] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] ba823cb8-570b-465f-a566-524b82ebc1ba/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk. 
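The rescue-disk preparation above reuses the cached image under a named lock: the [datastore2] devstack-image-cache_base/... .vmdk path is locked, the datastore is searched, and only then is CopyVirtualDisk_Task issued to produce the per-instance -rescue.vmdk before the lock is released. A minimal sketch of that guard, assuming oslo.concurrency is available; the copy callable, function name, and destination argument are illustrative placeholders, not Nova's code:

    from oslo_concurrency import lockutils

    CACHE_VMDK = ("[datastore2] devstack-image-cache_base/"
                  "f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/"
                  "f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk")

    def copy_rescue_disk(copy_virtual_disk, dest_vmdk):
        # Hold the same named lock the log shows being acquired and
        # released, so concurrent spawns/rescues do not race on the
        # cached source VMDK while it is checked and copied.
        with lockutils.lock(CACHE_VMDK):
            copy_virtual_disk(CACHE_VMDK, dest_vmdk)
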
[ 1282.050876] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7eedb87-ae57-418b-b859-4e7858f6c617 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.079422] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] ba823cb8-570b-465f-a566-524b82ebc1ba/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1282.079925] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1df107a0-94e2-4aaf-934d-d0f664f3d052 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.100967] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1282.100967] env[69994]: value = "task-2926572" [ 1282.100967] env[69994]: _type = "Task" [ 1282.100967] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.110653] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926572, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.160592] env[69994]: DEBUG oslo_concurrency.lockutils [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1282.160887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1282.255733] env[69994]: DEBUG nova.compute.manager [req-d86e330f-50bc-4281-b7e8-d672137de991 req-3a2a0d80-81d1-456c-a7b3-83dc105d7c6d service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Received event network-changed-a983eff5-af52-4477-9645-db9812917bc7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1282.255944] env[69994]: DEBUG nova.compute.manager [req-d86e330f-50bc-4281-b7e8-d672137de991 req-3a2a0d80-81d1-456c-a7b3-83dc105d7c6d service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Refreshing instance network info cache due to event network-changed-a983eff5-af52-4477-9645-db9812917bc7. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1282.256352] env[69994]: DEBUG oslo_concurrency.lockutils [req-d86e330f-50bc-4281-b7e8-d672137de991 req-3a2a0d80-81d1-456c-a7b3-83dc105d7c6d service nova] Acquiring lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.256510] env[69994]: DEBUG oslo_concurrency.lockutils [req-d86e330f-50bc-4281-b7e8-d672137de991 req-3a2a0d80-81d1-456c-a7b3-83dc105d7c6d service nova] Acquired lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1282.256678] env[69994]: DEBUG nova.network.neutron [req-d86e330f-50bc-4281-b7e8-d672137de991 req-3a2a0d80-81d1-456c-a7b3-83dc105d7c6d service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Refreshing network info cache for port a983eff5-af52-4477-9645-db9812917bc7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1282.613133] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926572, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.667947] env[69994]: INFO nova.compute.claims [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1283.012110] env[69994]: DEBUG nova.network.neutron [req-d86e330f-50bc-4281-b7e8-d672137de991 req-3a2a0d80-81d1-456c-a7b3-83dc105d7c6d service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updated VIF entry in instance network info cache for port a983eff5-af52-4477-9645-db9812917bc7. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1283.012500] env[69994]: DEBUG nova.network.neutron [req-d86e330f-50bc-4281-b7e8-d672137de991 req-3a2a0d80-81d1-456c-a7b3-83dc105d7c6d service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance_info_cache with network_info: [{"id": "a983eff5-af52-4477-9645-db9812917bc7", "address": "fa:16:3e:46:64:84", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa983eff5-af", "ovs_interfaceid": "a983eff5-af52-4477-9645-db9812917bc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.113388] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926572, 'name': ReconfigVM_Task, 'duration_secs': 0.730604} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.113772] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Reconfigured VM instance instance-00000079 to attach disk [datastore2] ba823cb8-570b-465f-a566-524b82ebc1ba/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1283.114827] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a045e82-96fa-4a6a-8dc4-f15395c45b2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.143529] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac6cb026-f7e4-4920-869b-fb1a5b2fd4b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.161125] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1283.161125] env[69994]: value = "task-2926573" [ 1283.161125] env[69994]: _type = "Task" [ 1283.161125] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.170799] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926573, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.175678] env[69994]: INFO nova.compute.resource_tracker [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating resource usage from migration 3418c69e-00b9-44c4-995d-657144e1e4c7 [ 1283.330510] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f1738d-e181-4c75-a648-d7d4da7ef7ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.339725] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66b5e76-568c-4f65-a99e-05e5d8770942 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.372326] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa81238c-4e96-47b5-bc4f-a9e2fdd44809 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.380978] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413c0a9f-a952-4a6d-9b9e-e47a52fd15e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.395204] env[69994]: DEBUG nova.compute.provider_tree [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1283.514919] env[69994]: DEBUG oslo_concurrency.lockutils [req-d86e330f-50bc-4281-b7e8-d672137de991 req-3a2a0d80-81d1-456c-a7b3-83dc105d7c6d service nova] Releasing lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1283.672994] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926573, 'name': ReconfigVM_Task, 'duration_secs': 0.17083} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.673493] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1283.673570] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1ada1d9-d3d7-4214-9a0b-e215c4048507 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.681887] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1283.681887] env[69994]: value = "task-2926574" [ 1283.681887] env[69994]: _type = "Task" [ 1283.681887] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.690706] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926574, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.918324] env[69994]: ERROR nova.scheduler.client.report [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [req-1f90db19-0910-4e48-9b27-aefd31d9970b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2173cd1f-90eb-4aab-b51d-83c140d1a7be. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1f90db19-0910-4e48-9b27-aefd31d9970b"}]} [ 1283.936372] env[69994]: DEBUG nova.scheduler.client.report [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Refreshing inventories for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1283.951297] env[69994]: DEBUG nova.scheduler.client.report [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Updating ProviderTree inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1283.951557] env[69994]: DEBUG nova.compute.provider_tree [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1283.964653] env[69994]: DEBUG nova.scheduler.client.report [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Refreshing aggregate associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1283.990412] env[69994]: DEBUG nova.scheduler.client.report [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Refreshing trait associations for resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be, traits: COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1284.146392] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154e0e18-f521-4939-b12f-c6599b973fad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.158793] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-91186960-f7bb-41f5-985d-6c7ae674f4a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.196446] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3697c7-7236-4657-b05c-749a6de4dbaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.206673] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926574, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.208051] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b731e90-bec4-420d-84f2-7b3a1f434687 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.223853] env[69994]: DEBUG nova.compute.provider_tree [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1284.703360] env[69994]: DEBUG oslo_vmware.api [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926574, 'name': PowerOnVM_Task, 'duration_secs': 0.610281} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.703740] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1284.706317] env[69994]: DEBUG nova.compute.manager [None req-e12b6e2a-c5ac-4cf7-af97-a9e0e2aee3a3 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1284.707079] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15bc48d-4314-4eee-838a-d0801d71c549 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.756427] env[69994]: DEBUG nova.scheduler.client.report [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 177 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1284.756718] env[69994]: DEBUG nova.compute.provider_tree [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 177 to 178 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1284.757145] env[69994]: DEBUG nova.compute.provider_tree [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1285.262564] env[69994]: DEBUG oslo_concurrency.lockutils [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.101s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1285.262755] env[69994]: INFO nova.compute.manager [None 
req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Migrating [ 1285.777236] env[69994]: DEBUG oslo_concurrency.lockutils [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.777468] env[69994]: DEBUG oslo_concurrency.lockutils [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1285.777634] env[69994]: DEBUG nova.network.neutron [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1286.416690] env[69994]: INFO nova.compute.manager [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Unrescuing [ 1286.416945] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "refresh_cache-ba823cb8-570b-465f-a566-524b82ebc1ba" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.417178] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquired lock "refresh_cache-ba823cb8-570b-465f-a566-524b82ebc1ba" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1286.417364] env[69994]: DEBUG nova.network.neutron [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1286.508911] env[69994]: DEBUG nova.network.neutron [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance_info_cache with network_info: [{"id": "a983eff5-af52-4477-9645-db9812917bc7", "address": "fa:16:3e:46:64:84", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa983eff5-af", "ovs_interfaceid": "a983eff5-af52-4477-9645-db9812917bc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.011956] env[69994]: DEBUG oslo_concurrency.lockutils [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1287.118982] env[69994]: DEBUG nova.network.neutron [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Updating instance_info_cache with network_info: [{"id": "f906f838-ecf7-49d9-9645-270f550c7083", "address": "fa:16:3e:53:a6:01", "network": {"id": "47871821-916c-4397-b330-8bdda7ccd6f6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1622921692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "659795e8bd77484fa20f48d704d113a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9297313e-7c50-4873-93d3-67284929163a", "external-id": "nsx-vlan-transportzone-620", "segmentation_id": 620, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf906f838-ec", "ovs_interfaceid": "f906f838-ecf7-49d9-9645-270f550c7083", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.621630] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Releasing lock "refresh_cache-ba823cb8-570b-465f-a566-524b82ebc1ba" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1287.622586] env[69994]: DEBUG nova.objects.instance [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lazy-loading 'flavor' on Instance uuid ba823cb8-570b-465f-a566-524b82ebc1ba {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1288.128772] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0baf67-168e-4027-925b-d8669f40ea65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.151126] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1288.151462] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dccfd15b-3cf1-44e5-aca8-9271db4e8bc1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.160510] env[69994]: DEBUG oslo_vmware.api [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1288.160510] env[69994]: value = "task-2926575" [ 1288.160510] env[69994]: _type = "Task" [ 1288.160510] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.168846] env[69994]: DEBUG oslo_vmware.api [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926575, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.526933] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59910b6-af85-47a4-bd51-752cf3dd8738 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.545681] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance 'ac72ed6c-15f7-47e3-83a0-abcd85bba128' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1288.672015] env[69994]: DEBUG oslo_vmware.api [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926575, 'name': PowerOffVM_Task, 'duration_secs': 0.245631} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.672301] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1288.677591] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Reconfiguring VM instance instance-00000079 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1288.677825] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43248901-dc1c-4795-af89-0fd698fa02ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.697019] env[69994]: DEBUG oslo_vmware.api [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1288.697019] env[69994]: value = "task-2926576" [ 1288.697019] env[69994]: _type = "Task" [ 1288.697019] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.708239] env[69994]: DEBUG oslo_vmware.api [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926576, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.052254] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1289.052604] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9f10e33-6c99-4975-b6dc-1e7d84c0d0a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.060704] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1289.060704] env[69994]: value = "task-2926577" [ 1289.060704] env[69994]: _type = "Task" [ 1289.060704] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.069025] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926577, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.207729] env[69994]: DEBUG oslo_vmware.api [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926576, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.570776] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.708182] env[69994]: DEBUG oslo_vmware.api [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926576, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.072067] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926577, 'name': PowerOffVM_Task, 'duration_secs': 1.002219} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.072067] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1290.072067] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance 'ac72ed6c-15f7-47e3-83a0-abcd85bba128' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1290.209091] env[69994]: DEBUG oslo_vmware.api [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926576, 'name': ReconfigVM_Task, 'duration_secs': 1.25614} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.209470] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Reconfigured VM instance instance-00000079 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1290.209610] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1290.209823] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1bd02a63-ff5a-482a-9441-e0b8fc3b3c2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.216247] env[69994]: DEBUG oslo_vmware.api [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1290.216247] env[69994]: value = "task-2926578" [ 1290.216247] env[69994]: _type = "Task" [ 1290.216247] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.223927] env[69994]: DEBUG oslo_vmware.api [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926578, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.578650] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1290.578938] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1290.579065] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1290.579199] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1290.579344] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1290.579490] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1290.579735] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1290.579923] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1290.580108] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Got 1 possible topologies 
{{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1290.580280] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1290.580452] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1290.585566] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c47f5dd3-d039-42dc-9f30-ce393bdbfa97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.601895] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1290.601895] env[69994]: value = "task-2926579" [ 1290.601895] env[69994]: _type = "Task" [ 1290.601895] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.609860] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926579, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.725988] env[69994]: DEBUG oslo_vmware.api [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926578, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.113067] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926579, 'name': ReconfigVM_Task, 'duration_secs': 0.143484} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.113067] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance 'ac72ed6c-15f7-47e3-83a0-abcd85bba128' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1291.227170] env[69994]: DEBUG oslo_vmware.api [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926578, 'name': PowerOnVM_Task, 'duration_secs': 0.573197} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.227522] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1291.227611] env[69994]: DEBUG nova.compute.manager [None req-d311d2d1-ef81-4bbc-80d0-24de57fdc567 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1291.228388] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffdfec74-7092-4e26-b8f0-0620421f67ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.345226] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.345440] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.618733] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1291.618979] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1291.619155] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1291.619334] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1291.619478] env[69994]: DEBUG nova.virt.hardware [None 
req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1291.619621] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1291.619816] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1291.619973] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1291.620151] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1291.620312] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1291.620479] env[69994]: DEBUG nova.virt.hardware [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1291.626708] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1291.626708] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dce1d829-c8d4-471b-a27b-7abae140e713 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.645426] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1291.645426] env[69994]: value = "task-2926580" [ 1291.645426] env[69994]: _type = "Task" [ 1291.645426] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.653595] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926580, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.850732] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.850932] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.851100] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.851253] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.851394] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.851538] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.851672] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1291.851872] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.095136] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "ba823cb8-570b-465f-a566-524b82ebc1ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.095463] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "ba823cb8-570b-465f-a566-524b82ebc1ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.095639] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "ba823cb8-570b-465f-a566-524b82ebc1ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.095852] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "ba823cb8-570b-465f-a566-524b82ebc1ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.095989] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "ba823cb8-570b-465f-a566-524b82ebc1ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.098152] env[69994]: INFO nova.compute.manager [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Terminating instance [ 1292.155406] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926580, 'name': ReconfigVM_Task, 'duration_secs': 0.189308} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.155670] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1292.156462] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012d769b-477f-40b2-9b6b-55c4c7d64060 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.178158] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b/volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1292.178633] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7051fe58-62a0-4950-8922-d170f380ed86 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.196484] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1292.196484] env[69994]: value = "task-2926581" [ 1292.196484] env[69994]: _type = "Task" [ 1292.196484] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.206523] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926581, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.356820] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.357264] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.357264] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.357357] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1292.358385] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3059ba0-9e32-4569-acd8-09083ca1f85c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.367504] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e0ce01-47e3-4f6a-b11c-fcabc1382a32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.382457] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1997ba2d-0b41-4716-877b-8d9c686fe5d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.389056] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd13bf1b-6bad-43e1-9066-2ba8cc1267af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.421506] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180024MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1292.421506] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.421506] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.601725] env[69994]: DEBUG nova.compute.manager [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1292.602032] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1292.602993] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d286b971-1db6-48d8-a84e-2d92999917e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.611947] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1292.612212] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8e938f1-7533-424e-8b2f-cf3e80de2c0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.620368] env[69994]: DEBUG oslo_vmware.api [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1292.620368] env[69994]: value = "task-2926582" [ 1292.620368] env[69994]: _type = "Task" [ 1292.620368] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.644465] env[69994]: DEBUG oslo_vmware.api [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926582, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.706766] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926581, 'name': ReconfigVM_Task, 'duration_secs': 0.269366} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.707576] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Reconfigured VM instance instance-0000007a to attach disk [datastore2] volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b/volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1292.707576] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance 'ac72ed6c-15f7-47e3-83a0-abcd85bba128' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1293.130840] env[69994]: DEBUG oslo_vmware.api [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926582, 'name': PowerOffVM_Task, 'duration_secs': 0.205384} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.131136] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1293.131307] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1293.131557] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6352789-a5bf-4543-aba2-9d64c1225aac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.207656] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1293.208036] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1293.208394] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Deleting the datastore file [datastore2] ba823cb8-570b-465f-a566-524b82ebc1ba {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1293.208735] 
env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-675ee283-6b1a-4d55-8e0d-891a562205fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.214069] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8aa2c90-f787-4435-baf3-439f9a45a1f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.217755] env[69994]: DEBUG oslo_vmware.api [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1293.217755] env[69994]: value = "task-2926584" [ 1293.217755] env[69994]: _type = "Task" [ 1293.217755] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.234749] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b6632a-e481-4706-8970-12aa99556254 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.240049] env[69994]: DEBUG oslo_vmware.api [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926584, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.257036] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance 'ac72ed6c-15f7-47e3-83a0-abcd85bba128' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1293.431946] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Applying migration context for instance ac72ed6c-15f7-47e3-83a0-abcd85bba128 as it has an incoming, in-progress migration 3418c69e-00b9-44c4-995d-657144e1e4c7. Migration status is migrating {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1293.433212] env[69994]: INFO nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating resource usage from migration 3418c69e-00b9-44c4-995d-657144e1e4c7 [ 1293.451769] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 8001cb13-6a52-451b-b4b6-57b893975079 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.451951] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 29ea539a-d8f4-487b-b5e7-1f15534272f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.452091] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 5784a102-fd07-4717-a88b-ac94ad578af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.452213] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 966e3672-f85b-467d-8821-1e14533ee629 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.452333] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 94169894-f772-41c9-95a1-ddf622f2c9f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.452459] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance ead967bc-ba1d-4c3c-8dbb-e284b444ffcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.452572] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 328868f0-2fe9-4c04-a669-54b073c53b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.452721] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance ba823cb8-570b-465f-a566-524b82ebc1ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.452800] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Migration 3418c69e-00b9-44c4-995d-657144e1e4c7 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1293.452898] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance ac72ed6c-15f7-47e3-83a0-abcd85bba128 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1293.453096] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1293.453229] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1293.560538] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b0a09a-9e71-451d-b691-6e7d28958a17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.568418] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7971b38f-9450-4e6a-8ad5-5019fab3f3cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.598584] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06565c36-9be5-434b-9820-2477ac32db8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.607650] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0469fe35-5440-43d4-8396-d193b037a824 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.621184] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1293.729549] env[69994]: DEBUG oslo_vmware.api [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926584, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167863} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.729917] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1293.730159] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1293.730371] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1293.730566] env[69994]: INFO nova.compute.manager [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1293.730835] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1293.731197] env[69994]: DEBUG nova.compute.manager [-] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1293.731197] env[69994]: DEBUG nova.network.neutron [-] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1294.155478] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updated inventory for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with generation 178 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1294.155719] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating resource provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be generation from 178 to 179 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1294.155879] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Updating inventory in ProviderTree for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1294.228484] env[69994]: DEBUG nova.compute.manager [req-f1202d3e-a116-48c9-98fe-0af1c68e1934 req-a757d5a0-3189-4418-a9b7-1612f35d97f6 service nova] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Received event network-vif-deleted-f906f838-ecf7-49d9-9645-270f550c7083 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1294.228699] env[69994]: INFO nova.compute.manager [req-f1202d3e-a116-48c9-98fe-0af1c68e1934 req-a757d5a0-3189-4418-a9b7-1612f35d97f6 service nova] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Neutron deleted interface f906f838-ecf7-49d9-9645-270f550c7083; detaching it from the instance and deleting it from the info cache [ 1294.228877] env[69994]: DEBUG nova.network.neutron [req-f1202d3e-a116-48c9-98fe-0af1c68e1934 req-a757d5a0-3189-4418-a9b7-1612f35d97f6 service nova] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.660489] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1294.660885] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.239s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.709614] env[69994]: DEBUG nova.network.neutron [-] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.731172] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3782c89c-36cf-4784-a4c6-abad77a8e5d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.742783] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091e29dd-ce65-4716-9047-6d8979b79cc9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.776935] env[69994]: DEBUG nova.compute.manager [req-f1202d3e-a116-48c9-98fe-0af1c68e1934 req-a757d5a0-3189-4418-a9b7-1612f35d97f6 service nova] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Detach interface failed, port_id=f906f838-ecf7-49d9-9645-270f550c7083, reason: Instance ba823cb8-570b-465f-a566-524b82ebc1ba could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1294.899074] env[69994]: DEBUG nova.network.neutron [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Port a983eff5-af52-4477-9645-db9812917bc7 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1295.212184] env[69994]: INFO nova.compute.manager [-] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Took 1.48 seconds to deallocate network for instance. 
[ 1295.718597] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1295.718944] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1295.719145] env[69994]: DEBUG nova.objects.instance [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lazy-loading 'resources' on Instance uuid ba823cb8-570b-465f-a566-524b82ebc1ba {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1295.919975] env[69994]: DEBUG oslo_concurrency.lockutils [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1295.919975] env[69994]: DEBUG oslo_concurrency.lockutils [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1295.920167] env[69994]: DEBUG oslo_concurrency.lockutils [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.335653] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f35c652-011a-46fb-b3fe-924a84f052be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.343814] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0833d307-56c0-490d-83ee-1fb2d271b483 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.372934] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8484242-3479-47bd-9395-6f1de0fa257d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.379771] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-307da3a5-6390-41ed-8b4b-8deb4c5966be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.392426] env[69994]: DEBUG nova.compute.provider_tree [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1296.895542] env[69994]: DEBUG nova.scheduler.client.report [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1296.956828] env[69994]: DEBUG oslo_concurrency.lockutils [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.956949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.957306] env[69994]: DEBUG nova.network.neutron [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1297.400509] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.681s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1297.420837] env[69994]: INFO nova.scheduler.client.report [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Deleted allocations for instance ba823cb8-570b-465f-a566-524b82ebc1ba [ 1297.670905] env[69994]: DEBUG nova.network.neutron [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance_info_cache with network_info: [{"id": "a983eff5-af52-4477-9645-db9812917bc7", "address": "fa:16:3e:46:64:84", "network": {"id": 
"3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa983eff5-af", "ovs_interfaceid": "a983eff5-af52-4477-9645-db9812917bc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.696250] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "94169894-f772-41c9-95a1-ddf622f2c9f6" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.696494] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.696668] env[69994]: INFO nova.compute.manager [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Shelving [ 1297.929490] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30713e93-9844-41a6-a776-a92a0d23c4e8 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "ba823cb8-570b-465f-a566-524b82ebc1ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.834s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.173880] env[69994]: DEBUG oslo_concurrency.lockutils [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1298.684011] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8905cb3-b595-4444-a49b-797872601bfe {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.691536] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6354ba-e7da-435a-a675-4e6b12bdafa2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.706204] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1298.708496] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-628cddb1-5a32-499c-a042-3358204fa186 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.714960] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "966e3672-f85b-467d-8821-1e14533ee629" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.715207] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "966e3672-f85b-467d-8821-1e14533ee629" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.715406] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "966e3672-f85b-467d-8821-1e14533ee629-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.715585] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "966e3672-f85b-467d-8821-1e14533ee629-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.715750] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "966e3672-f85b-467d-8821-1e14533ee629-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.717268] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1298.717268] env[69994]: 
value = "task-2926585" [ 1298.717268] env[69994]: _type = "Task" [ 1298.717268] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.717717] env[69994]: INFO nova.compute.manager [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Terminating instance [ 1298.728366] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926585, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.225472] env[69994]: DEBUG nova.compute.manager [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1299.225908] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1299.227114] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414785a3-1af6-4788-b626-838662ca8460 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.237921] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1299.242403] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9e8be7f-b96c-4fe4-8710-37becee8f48e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.244381] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926585, 'name': PowerOffVM_Task, 'duration_secs': 0.232317} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.245127] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1299.246917] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a8be85-eecc-4902-abd7-f48ff240fa45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.255474] env[69994]: DEBUG oslo_vmware.api [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1299.255474] env[69994]: value = "task-2926586" [ 1299.255474] env[69994]: _type = "Task" [ 1299.255474] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.290285] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67164e0b-8dc9-4476-b987-957715802b16 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.299525] env[69994]: DEBUG oslo_vmware.api [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926586, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.766372] env[69994]: DEBUG oslo_vmware.api [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926586, 'name': PowerOffVM_Task, 'duration_secs': 0.207486} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.767286] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1299.767286] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1299.767454] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-784d69c8-3f7d-4f41-9407-6ad195dd3255 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.811309] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1299.812604] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-843e9754-b820-40ea-927e-895df7d663c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.815145] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169ab4d6-8823-47d1-a28a-d4073fc3a010 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.836446] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d67aa02-4a80-401d-8d90-149906290dab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.839338] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1299.839533] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1299.839710] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Deleting the datastore file [datastore1] 966e3672-f85b-467d-8821-1e14533ee629 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1299.840074] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 
tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1299.840074] env[69994]: value = "task-2926588" [ 1299.840074] env[69994]: _type = "Task" [ 1299.840074] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.840302] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd366dfe-7517-4d6f-b9eb-8fe6f1e57107 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.847685] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance 'ac72ed6c-15f7-47e3-83a0-abcd85bba128' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1299.852438] env[69994]: DEBUG oslo_vmware.api [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for the task: (returnval){ [ 1299.852438] env[69994]: value = "task-2926589" [ 1299.852438] env[69994]: _type = "Task" [ 1299.852438] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.858912] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926588, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.863940] env[69994]: DEBUG oslo_vmware.api [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926589, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.352284] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926588, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.355136] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1300.355455] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e66a4d8f-5e04-4aec-a968-4185c6a76dd8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.365503] env[69994]: DEBUG oslo_vmware.api [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Task: {'id': task-2926589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176763} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.366722] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1300.366935] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1300.367190] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1300.367390] env[69994]: INFO nova.compute.manager [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1300.367647] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1300.367893] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1300.367893] env[69994]: value = "task-2926590" [ 1300.367893] env[69994]: _type = "Task" [ 1300.367893] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.368093] env[69994]: DEBUG nova.compute.manager [-] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1300.368192] env[69994]: DEBUG nova.network.neutron [-] [instance: 966e3672-f85b-467d-8821-1e14533ee629] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1300.378073] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926590, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.710826] env[69994]: DEBUG nova.compute.manager [req-14a74f77-2348-4cfe-80d8-3e957314d6a7 req-fd6b982f-ba6c-4a3b-a81c-6b2a0ca9816d service nova] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Received event network-vif-deleted-372f3c43-b01a-4ba8-919b-804926d5fceb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1300.711105] env[69994]: INFO nova.compute.manager [req-14a74f77-2348-4cfe-80d8-3e957314d6a7 req-fd6b982f-ba6c-4a3b-a81c-6b2a0ca9816d service nova] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Neutron deleted interface 372f3c43-b01a-4ba8-919b-804926d5fceb; detaching it from the instance and deleting it from the info cache [ 1300.711374] env[69994]: DEBUG nova.network.neutron [req-14a74f77-2348-4cfe-80d8-3e957314d6a7 req-fd6b982f-ba6c-4a3b-a81c-6b2a0ca9816d service nova] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.852315] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926588, 'name': CreateSnapshot_Task, 'duration_secs': 0.883543} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.852587] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1300.853367] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b97a46-9711-4a78-9fe3-cbe2ed81e8a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.878875] env[69994]: DEBUG oslo_vmware.api [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926590, 'name': PowerOnVM_Task, 'duration_secs': 0.397019} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.879057] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1300.879139] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-88098053-7836-44bd-925b-f380a2f6c59d tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance 'ac72ed6c-15f7-47e3-83a0-abcd85bba128' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1301.191960] env[69994]: DEBUG nova.network.neutron [-] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.214792] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f93410ca-70fc-400e-b39a-2b38b51c0ef3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.226010] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0f0692-f28c-4579-a1e2-9c365d423efd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.256648] env[69994]: DEBUG nova.compute.manager [req-14a74f77-2348-4cfe-80d8-3e957314d6a7 req-fd6b982f-ba6c-4a3b-a81c-6b2a0ca9816d service nova] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Detach interface failed, port_id=372f3c43-b01a-4ba8-919b-804926d5fceb, reason: Instance 966e3672-f85b-467d-8821-1e14533ee629 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1301.371251] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1301.371655] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6d1380a1-8abc-4cf5-9452-30ec6dee289a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.381143] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1301.381143] env[69994]: value = "task-2926591" [ 1301.381143] env[69994]: _type = "Task" [ 1301.381143] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.393986] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926591, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.694889] env[69994]: INFO nova.compute.manager [-] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Took 1.33 seconds to deallocate network for instance. [ 1301.892999] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926591, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.201968] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1302.202339] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1302.202596] env[69994]: DEBUG nova.objects.instance [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lazy-loading 'resources' on Instance uuid 966e3672-f85b-467d-8821-1e14533ee629 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1302.393126] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926591, 'name': CloneVM_Task, 'duration_secs': 0.967372} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.393631] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Created linked-clone VM from snapshot [ 1302.394274] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446b34e6-4bb3-4bf0-b31c-d7eb4fd6c80b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.402014] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Uploading image f87a1822-23bb-48ba-a487-e222570db3f4 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1302.428874] env[69994]: DEBUG oslo_vmware.rw_handles [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1302.428874] env[69994]: value = "vm-587673" [ 1302.428874] env[69994]: _type = "VirtualMachine" [ 1302.428874] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1302.435318] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bf761d19-8991-4b92-9ebe-46bc3e7a1fdf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.445664] env[69994]: DEBUG oslo_vmware.rw_handles [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lease: (returnval){ [ 1302.445664] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528cc417-87b1-57cb-1c89-f7dde0a0d8b0" [ 1302.445664] env[69994]: _type = "HttpNfcLease" [ 1302.445664] env[69994]: } obtained for exporting VM: (result){ [ 1302.445664] env[69994]: value = "vm-587673" [ 1302.445664] env[69994]: _type = "VirtualMachine" [ 1302.445664] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1302.446068] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the lease: (returnval){ [ 1302.446068] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528cc417-87b1-57cb-1c89-f7dde0a0d8b0" [ 1302.446068] env[69994]: _type = "HttpNfcLease" [ 1302.446068] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1302.452624] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1302.452624] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528cc417-87b1-57cb-1c89-f7dde0a0d8b0" [ 1302.452624] env[69994]: _type = "HttpNfcLease" [ 1302.452624] env[69994]: } is initializing. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1302.829229] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d3ce0f-26a9-4946-9bde-751c83673a99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.839383] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb301b9-21a7-43b5-bccb-d1339b0e2e22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.873383] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bb2be0-a254-4e40-ac33-20d834de2db8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.881944] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b47dd1-13f9-4c74-9fd5-9ec98a47e26e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.897943] env[69994]: DEBUG nova.compute.provider_tree [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1302.955380] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1302.955380] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528cc417-87b1-57cb-1c89-f7dde0a0d8b0" [ 1302.955380] env[69994]: _type = "HttpNfcLease" [ 1302.955380] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1302.955695] env[69994]: DEBUG oslo_vmware.rw_handles [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1302.955695] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528cc417-87b1-57cb-1c89-f7dde0a0d8b0" [ 1302.955695] env[69994]: _type = "HttpNfcLease" [ 1302.955695] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1302.956468] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbf17a1-2e94-44b2-835e-1e6a16f7da5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.963809] env[69994]: DEBUG oslo_vmware.rw_handles [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bf7d84-399f-a57d-9d16-04fb2c04c4f6/disk-0.vmdk from lease info. 
{{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1302.963999] env[69994]: DEBUG oslo_vmware.rw_handles [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bf7d84-399f-a57d-9d16-04fb2c04c4f6/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1303.056268] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-26453eb2-447f-475f-941c-64ae771e2550 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.306546] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.306937] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.307212] env[69994]: DEBUG nova.compute.manager [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Going to confirm migration 8 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1303.401523] env[69994]: DEBUG nova.scheduler.client.report [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1303.892940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.894038] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquired lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1303.894038] env[69994]: DEBUG nova.network.neutron [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1303.894038] env[69994]: DEBUG nova.objects.instance [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lazy-loading 'info_cache' on Instance uuid ac72ed6c-15f7-47e3-83a0-abcd85bba128 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1303.910361] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.708s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.931088] env[69994]: INFO nova.scheduler.client.report [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Deleted allocations for instance 966e3672-f85b-467d-8821-1e14533ee629 [ 1304.439573] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6cbe407c-2133-4f18-b5ad-33fb8a7e1276 tempest-ServerRescueTestJSON-2043119363 tempest-ServerRescueTestJSON-2043119363-project-member] Lock "966e3672-f85b-467d-8821-1e14533ee629" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.724s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1305.360740] env[69994]: DEBUG nova.network.neutron [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance_info_cache with network_info: [{"id": "a983eff5-af52-4477-9645-db9812917bc7", "address": "fa:16:3e:46:64:84", "network": {"id": "3aa2914d-f475-4425-bc38-e35df9eac761", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-76627345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c324e22a0046460b9ad3ad8578f7ef6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa983eff5-af", "ovs_interfaceid": "a983eff5-af52-4477-9645-db9812917bc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.865215] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Releasing lock "refresh_cache-ac72ed6c-15f7-47e3-83a0-abcd85bba128" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1305.865215] env[69994]: DEBUG nova.objects.instance [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lazy-loading 'migration_context' on Instance uuid ac72ed6c-15f7-47e3-83a0-abcd85bba128 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1306.369888] env[69994]: DEBUG nova.objects.base [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1306.370687] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f287e49-d153-48a3-a95b-73d020796aad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.394079] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03dbe9d7-11ba-47b6-a845-8ea362b8db76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.401144] env[69994]: DEBUG oslo_vmware.api [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1306.401144] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52948f1e-2f79-0a1c-9952-df53965493bb" [ 1306.401144] env[69994]: _type = "Task" [ 1306.401144] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.411308] env[69994]: DEBUG oslo_vmware.api [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52948f1e-2f79-0a1c-9952-df53965493bb, 'name': SearchDatastore_Task, 'duration_secs': 0.007951} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.411308] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1306.411308] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1307.026506] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56902a33-a96f-41eb-a62c-7a5ff3553a2a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.034571] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd12190-ffed-432a-8b2f-955e794259e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.064714] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a11109a-8b7f-4225-8a09-f1050196c70c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.072983] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b998dd3c-f18c-41e2-abba-f73b1b10892d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.088142] env[69994]: DEBUG nova.compute.provider_tree [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1307.217493] env[69994]: DEBUG oslo_concurrency.lockutils [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1307.217791] env[69994]: DEBUG oslo_concurrency.lockutils [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1307.217975] env[69994]: INFO nova.compute.manager [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 
tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Shelving [ 1307.591419] env[69994]: DEBUG nova.scheduler.client.report [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1308.231098] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1308.231098] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a8e5172-1203-49e3-b5bd-cd4e49444e43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.238030] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1308.238030] env[69994]: value = "task-2926593" [ 1308.238030] env[69994]: _type = "Task" [ 1308.238030] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.245188] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926593, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.602433] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.191s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1308.747818] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926593, 'name': PowerOffVM_Task, 'duration_secs': 0.190037} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.748106] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1308.749092] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ed5011-1b9f-4bb1-9c09-5a1b16ef9954 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.769697] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c89414-b5cf-4ea4-90a9-63c334ccd8e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.180514] env[69994]: INFO nova.scheduler.client.report [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleted allocation for migration 3418c69e-00b9-44c4-995d-657144e1e4c7 [ 1309.283509] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1309.283509] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c74156f4-e593-4b43-bb87-bd5891e14562 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.289624] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1309.289624] env[69994]: value = "task-2926594" [ 1309.289624] env[69994]: _type = "Task" [ 1309.289624] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.299613] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926594, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.688533] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46673e8e-0e95-4744-82a4-aad0b2ef7585 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.381s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.736887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "aefb7903-afd1-4574-bec1-adab769728b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.736887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "aefb7903-afd1-4574-bec1-adab769728b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.801903] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926594, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.804134] env[69994]: INFO nova.compute.manager [None req-231cfecf-0011-44b8-9486-4d22ad710884 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Get console output [ 1309.804434] env[69994]: WARNING nova.virt.vmwareapi.driver [None req-231cfecf-0011-44b8-9486-4d22ad710884 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] The console log is missing. 
Check your VSPC configuration [ 1310.063569] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "27d2bf57-80ec-4bc4-b87b-560f7dfd6524" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1310.063813] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "27d2bf57-80ec-4bc4-b87b-560f7dfd6524" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1310.240024] env[69994]: DEBUG nova.compute.manager [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1310.303021] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926594, 'name': CreateSnapshot_Task, 'duration_secs': 0.824761} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.303021] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1310.303021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1617d77d-4f4b-4be6-b1b1-4309a9fa7857 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.566717] env[69994]: DEBUG nova.compute.manager [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1310.763846] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1310.764274] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1310.765988] env[69994]: INFO nova.compute.claims [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1310.819473] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1310.820205] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e5dffcff-5e57-4503-b7c0-55e38b09c7dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.830337] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1310.830337] env[69994]: value = "task-2926595" [ 1310.830337] env[69994]: _type = "Task" [ 1310.830337] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.838720] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926595, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.086670] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1311.340206] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926595, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.842506] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926595, 'name': CloneVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.908464] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9029dab8-3039-4ada-b04f-04ca93d0f94b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.915846] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1991e61b-d563-47aa-9ee0-9243600f8a37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.947038] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03636775-53f2-48f5-ab5a-6131e1bf6a2c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.954806] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6484bee7-b431-4e64-a6bd-224c8b3d2a31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.968791] env[69994]: DEBUG nova.compute.provider_tree [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1312.009117] env[69994]: DEBUG oslo_vmware.rw_handles [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bf7d84-399f-a57d-9d16-04fb2c04c4f6/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1312.010059] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae84df4-9119-44e2-ae2b-53ba0311ce32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.016868] env[69994]: DEBUG oslo_vmware.rw_handles [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bf7d84-399f-a57d-9d16-04fb2c04c4f6/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1312.017072] env[69994]: ERROR oslo_vmware.rw_handles [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bf7d84-399f-a57d-9d16-04fb2c04c4f6/disk-0.vmdk due to incomplete transfer. 
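Editor's note: the "Acquiring lock ... by ..." / "Lock ... acquired ... waited Ns" / "Lock ... 'released' ... held Ns" triplets that recur in these entries (for the per-instance UUID lock and for "compute_resources") come from oslo.concurrency's lockutils. The following is an illustrative sketch only, with hypothetical lock names and function, of the calling pattern that produces such lines; it is not Nova's actual code.

# Illustrative sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock / acquired / released" DEBUG lines above.
# Lock names and the function below are placeholders, not Nova internals.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Runs only while the named lock is held; lockutils logs the acquire
    # and release lines with the waited/held timings at DEBUG level.
    return 'claimed %s' % instance_uuid

# The same facility can be used explicitly as a context manager,
# e.g. for a per-instance lock keyed on the UUID:
with lockutils.lock('27d2bf57-80ec-4bc4-b87b-560f7dfd6524'):
    pass  # critical section guarded by the per-instance lock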
[ 1312.017317] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-10e4df9b-2ff4-4ef1-b1a0-33106037d2c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.024329] env[69994]: DEBUG oslo_vmware.rw_handles [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bf7d84-399f-a57d-9d16-04fb2c04c4f6/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1312.024560] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Uploaded image f87a1822-23bb-48ba-a487-e222570db3f4 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1312.026934] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1312.027204] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-642f8c8b-d0c6-4224-9940-b04638793d39 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.033056] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1312.033056] env[69994]: value = "task-2926596" [ 1312.033056] env[69994]: _type = "Task" [ 1312.033056] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.040781] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926596, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.392382] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926595, 'name': CloneVM_Task, 'duration_secs': 1.355039} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.392382] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Created linked-clone VM from snapshot [ 1312.392382] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22f4629-1f8b-4753-8c94-7ed44451e246 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.392382] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Uploading image 57b3393e-1b45-4b81-8d0b-45ac7731565f {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1312.404765] env[69994]: DEBUG oslo_vmware.rw_handles [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1312.404765] env[69994]: value = "vm-587675" [ 1312.404765] env[69994]: _type = "VirtualMachine" [ 1312.404765] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1312.405058] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8cbe1f0f-d7a6-4c03-9241-4a64c63b5fc5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.414328] env[69994]: DEBUG oslo_vmware.rw_handles [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lease: (returnval){ [ 1312.414328] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ec6150-85ca-7a94-aa86-95a4f329b5d4" [ 1312.414328] env[69994]: _type = "HttpNfcLease" [ 1312.414328] env[69994]: } obtained for exporting VM: (result){ [ 1312.414328] env[69994]: value = "vm-587675" [ 1312.414328] env[69994]: _type = "VirtualMachine" [ 1312.414328] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1312.414726] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the lease: (returnval){ [ 1312.414726] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ec6150-85ca-7a94-aa86-95a4f329b5d4" [ 1312.414726] env[69994]: _type = "HttpNfcLease" [ 1312.414726] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1312.421518] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1312.421518] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ec6150-85ca-7a94-aa86-95a4f329b5d4" [ 1312.421518] env[69994]: _type = "HttpNfcLease" [ 1312.421518] env[69994]: } is initializing. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1312.473747] env[69994]: DEBUG nova.scheduler.client.report [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1312.541964] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926596, 'name': Destroy_Task, 'duration_secs': 0.387177} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.542318] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Destroyed the VM [ 1312.542555] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1312.542795] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2a6e4a20-5be6-476a-bbdf-5032d151621d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.549173] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1312.549173] env[69994]: value = "task-2926598" [ 1312.549173] env[69994]: _type = "Task" [ 1312.549173] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.556396] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926598, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.750488] env[69994]: INFO nova.compute.manager [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Rebuilding instance [ 1312.790376] env[69994]: DEBUG nova.compute.manager [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1312.791374] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0832f467-0507-4baa-9b4a-3845bf69bdce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.923197] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1312.923197] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ec6150-85ca-7a94-aa86-95a4f329b5d4" [ 1312.923197] env[69994]: _type = "HttpNfcLease" [ 1312.923197] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1312.923487] env[69994]: DEBUG oslo_vmware.rw_handles [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1312.923487] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ec6150-85ca-7a94-aa86-95a4f329b5d4" [ 1312.923487] env[69994]: _type = "HttpNfcLease" [ 1312.923487] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1312.924166] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635a1a23-b0ad-4fcb-b44c-fcdf3f6201a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.931334] env[69994]: DEBUG oslo_vmware.rw_handles [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5295a49a-d103-c84b-9619-85606ae6e699/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1312.931509] env[69994]: DEBUG oslo_vmware.rw_handles [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5295a49a-d103-c84b-9619-85606ae6e699/disk-0.vmdk for reading. 
{{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1312.987395] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.223s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.987898] env[69994]: DEBUG nova.compute.manager [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1312.991455] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.905s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1312.992869] env[69994]: INFO nova.compute.claims [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1313.024776] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6bfa2d3a-b247-420d-9548-76f526bebfa1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.061474] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926598, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.497035] env[69994]: DEBUG nova.compute.utils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1313.501702] env[69994]: DEBUG nova.compute.manager [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1313.502108] env[69994]: DEBUG nova.network.neutron [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1313.562232] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926598, 'name': RemoveSnapshot_Task, 'duration_secs': 0.784875} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.562541] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1313.563041] env[69994]: DEBUG nova.compute.manager [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1313.563906] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03633553-79c0-498b-830d-0d5115a17953 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.581950] env[69994]: DEBUG nova.policy [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e6a83397a40d4752826e9572ef3e2626', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c3dcb9ec62247adb210b83c9de8bf96', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1313.805509] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1313.806736] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47c4c203-e9df-458e-b143-f8c4d37216ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.814120] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1313.814120] env[69994]: value = "task-2926599" [ 1313.814120] env[69994]: 
_type = "Task" [ 1313.814120] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.825990] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926599, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.853219] env[69994]: DEBUG nova.network.neutron [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Successfully created port: f7b1d9c4-f85a-4f93-a48a-87a59a84831b {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1314.002743] env[69994]: DEBUG nova.compute.manager [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1314.082019] env[69994]: INFO nova.compute.manager [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Shelve offloading [ 1314.146122] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a46886c-9873-4097-8335-c23d11e56d81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.154652] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd05d57-4215-40a4-b887-5e53415eef0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.187811] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33f9503-af2e-429f-90fc-9eed14bcb29b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.197049] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e642999d-b2f1-4ff5-892a-6c7b648bbc50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.212619] env[69994]: DEBUG nova.compute.provider_tree [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1314.323922] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926599, 'name': PowerOffVM_Task, 'duration_secs': 0.17644} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.324543] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1314.324935] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1314.327074] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7251c0f9-cdb3-4e79-87c9-af5221326f0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.332301] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1314.332581] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1abec878-397b-4218-8db9-546745ab3a2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.389653] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1314.389980] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1314.390390] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleting the datastore file [datastore2] 328868f0-2fe9-4c04-a669-54b073c53b14 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1314.390738] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a0d5550-1aa7-4791-a9d4-a1a2f8ac49b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.396979] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1314.396979] env[69994]: value = "task-2926601" [ 1314.396979] env[69994]: _type = "Task" [ 1314.396979] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.405016] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926601, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.589476] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1314.590792] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42b5e14f-0792-4e87-87a7-3d7ff6bdc35b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.598505] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1314.598505] env[69994]: value = "task-2926602" [ 1314.598505] env[69994]: _type = "Task" [ 1314.598505] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.607739] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1314.608257] env[69994]: DEBUG nova.compute.manager [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1314.609289] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c00b32-878e-466c-9ccd-eed3eba5ef85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.615291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.615546] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1314.615788] env[69994]: DEBUG nova.network.neutron [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 
tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1314.716407] env[69994]: DEBUG nova.scheduler.client.report [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1314.907746] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926601, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148343} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.909171] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1314.909171] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1314.909171] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1315.014854] env[69994]: DEBUG nova.compute.manager [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1315.042353] env[69994]: DEBUG nova.virt.hardware [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1315.042676] env[69994]: DEBUG nova.virt.hardware [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1315.042896] env[69994]: DEBUG nova.virt.hardware [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1315.043149] env[69994]: DEBUG nova.virt.hardware [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1315.043348] env[69994]: DEBUG nova.virt.hardware [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1315.043585] env[69994]: DEBUG nova.virt.hardware [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1315.043801] env[69994]: DEBUG nova.virt.hardware [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1315.043985] env[69994]: DEBUG nova.virt.hardware [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1315.044220] env[69994]: DEBUG nova.virt.hardware [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1315.044510] env[69994]: DEBUG nova.virt.hardware [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1315.044728] env[69994]: DEBUG nova.virt.hardware [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1315.045725] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7de0ba-8235-446a-8877-0b63e2a9ccb6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.054558] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d4138c-8d14-43ac-a598-d26b74dfe855 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.221790] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.230s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1315.222470] env[69994]: DEBUG nova.compute.manager [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1315.402420] env[69994]: DEBUG nova.compute.manager [req-bac84cb5-397e-48b6-a84d-96d50ac6f5fd req-9213993f-0e31-4151-bf0a-ffb5342bcbcb service nova] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Received event network-vif-plugged-f7b1d9c4-f85a-4f93-a48a-87a59a84831b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1315.402644] env[69994]: DEBUG oslo_concurrency.lockutils [req-bac84cb5-397e-48b6-a84d-96d50ac6f5fd req-9213993f-0e31-4151-bf0a-ffb5342bcbcb service nova] Acquiring lock "aefb7903-afd1-4574-bec1-adab769728b5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.402861] env[69994]: DEBUG oslo_concurrency.lockutils [req-bac84cb5-397e-48b6-a84d-96d50ac6f5fd req-9213993f-0e31-4151-bf0a-ffb5342bcbcb service nova] Lock "aefb7903-afd1-4574-bec1-adab769728b5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.403047] env[69994]: DEBUG oslo_concurrency.lockutils [req-bac84cb5-397e-48b6-a84d-96d50ac6f5fd req-9213993f-0e31-4151-bf0a-ffb5342bcbcb service nova] Lock "aefb7903-afd1-4574-bec1-adab769728b5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1315.403219] env[69994]: DEBUG nova.compute.manager [req-bac84cb5-397e-48b6-a84d-96d50ac6f5fd req-9213993f-0e31-4151-bf0a-ffb5342bcbcb service nova] [instance: aefb7903-afd1-4574-bec1-adab769728b5] No waiting events found dispatching network-vif-plugged-f7b1d9c4-f85a-4f93-a48a-87a59a84831b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1315.403383] env[69994]: WARNING nova.compute.manager [req-bac84cb5-397e-48b6-a84d-96d50ac6f5fd req-9213993f-0e31-4151-bf0a-ffb5342bcbcb service nova] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Received unexpected event network-vif-plugged-f7b1d9c4-f85a-4f93-a48a-87a59a84831b for instance with vm_state building and task_state spawning. 
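Editor's note: the recurring vSphere task lifecycle in these entries (an "Invoking <Something>_Task" SOAP call, then repeated "Task: {...} progress is N%" polls, then "completed successfully" with a duration_secs) is driven by the oslo.vmware session. The sketch below shows the usual caller-side shape under stated assumptions: hostname, credentials and vm_ref are placeholders, and the positional constructor arguments (host, user, password, retry count, poll interval) are assumed rather than taken from this deployment's config.

# Illustrative sketch of driving a vSphere task with oslo.vmware.
# All values are placeholders; constructor argument order is an assumption.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'secret', 10, 0.5)

def power_off(vm_ref):
    # invoke_api() issues the SOAP call (the "Invoking
    # VirtualMachine.PowerOffVM_Task" line) and returns a task reference.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() polls it, producing the "Task: {...} progress is N%"
    # lines, and returns once the task completes (or raises on failure).
    return session.wait_for_task(task)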
[ 1315.517959] env[69994]: DEBUG nova.network.neutron [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Updating instance_info_cache with network_info: [{"id": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "address": "fa:16:3e:98:5a:72", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6645c1c7-f3", "ovs_interfaceid": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.559817] env[69994]: DEBUG nova.network.neutron [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Successfully updated port: f7b1d9c4-f85a-4f93-a48a-87a59a84831b {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1315.727366] env[69994]: DEBUG nova.compute.utils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1315.729514] env[69994]: DEBUG nova.compute.manager [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1315.729726] env[69994]: DEBUG nova.network.neutron [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1315.805654] env[69994]: DEBUG nova.policy [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e6a83397a40d4752826e9572ef3e2626', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c3dcb9ec62247adb210b83c9de8bf96', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1315.949333] env[69994]: DEBUG nova.virt.hardware [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1315.949609] env[69994]: DEBUG nova.virt.hardware [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1315.949770] env[69994]: DEBUG nova.virt.hardware [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1315.949951] env[69994]: DEBUG nova.virt.hardware [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1315.950117] env[69994]: DEBUG nova.virt.hardware [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1315.950271] env[69994]: DEBUG nova.virt.hardware [None 
req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1315.950484] env[69994]: DEBUG nova.virt.hardware [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1315.950643] env[69994]: DEBUG nova.virt.hardware [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1315.950813] env[69994]: DEBUG nova.virt.hardware [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1315.950978] env[69994]: DEBUG nova.virt.hardware [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1315.951174] env[69994]: DEBUG nova.virt.hardware [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1315.952148] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ccfbe8-b27e-4e98-8a09-29a26e283019 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.960541] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0541a22a-aed1-469f-a52e-0db17d4ffa60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.974650] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:56:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c5652322-9f10-4996-baed-4c0aa13a1b4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3759990-8ec1-401b-9393-767859b0a13f', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1315.982241] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1315.982527] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1315.983185] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d648e882-596a-4b69-8c01-1fd4fcd8e217 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.002640] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1316.002640] env[69994]: value = "task-2926603" [ 1316.002640] env[69994]: _type = "Task" [ 1316.002640] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.010567] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926603, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.020306] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1316.065651] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "refresh_cache-aefb7903-afd1-4574-bec1-adab769728b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.065976] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "refresh_cache-aefb7903-afd1-4574-bec1-adab769728b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1316.066304] env[69994]: DEBUG nova.network.neutron [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1316.174243] env[69994]: DEBUG nova.network.neutron [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Successfully created port: 30d5769c-ab0c-4501-b426-4747886e04e6 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1316.233212] env[69994]: DEBUG nova.compute.manager [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1316.513642] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926603, 'name': CreateVM_Task, 'duration_secs': 0.359819} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.513888] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1316.514470] env[69994]: DEBUG oslo_concurrency.lockutils [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.514637] env[69994]: DEBUG oslo_concurrency.lockutils [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1316.514986] env[69994]: DEBUG oslo_concurrency.lockutils [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1316.515266] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a78917d0-6407-48ee-9f9c-002a3ea0c031 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.519935] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1316.519935] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d93a83-ee4a-a340-5a74-893283b6d47a" [ 1316.519935] env[69994]: _type = "Task" [ 1316.519935] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.527749] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d93a83-ee4a-a340-5a74-893283b6d47a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.594135] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1316.595095] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e5aca0-3af9-4558-b13f-1b81c48776ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.602583] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1316.602837] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61f71dd7-b621-444e-80c2-9ad97513f0ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.604956] env[69994]: DEBUG nova.network.neutron [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1316.671993] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1316.671993] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1316.672246] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleting the datastore file [datastore1] 94169894-f772-41c9-95a1-ddf622f2c9f6 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1316.672611] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff65eeac-eacb-4d42-bdc6-8a14e5b3f6a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.682673] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1316.682673] env[69994]: value = "task-2926605" [ 1316.682673] env[69994]: _type = "Task" [ 1316.682673] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.691271] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926605, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.746288] env[69994]: DEBUG nova.network.neutron [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Updating instance_info_cache with network_info: [{"id": "f7b1d9c4-f85a-4f93-a48a-87a59a84831b", "address": "fa:16:3e:24:3f:e1", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7b1d9c4-f8", "ovs_interfaceid": "f7b1d9c4-f85a-4f93-a48a-87a59a84831b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1317.029910] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d93a83-ee4a-a340-5a74-893283b6d47a, 'name': SearchDatastore_Task, 'duration_secs': 0.010541} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.030251] env[69994]: DEBUG oslo_concurrency.lockutils [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1317.030485] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1317.030717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.030860] env[69994]: DEBUG oslo_concurrency.lockutils [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1317.031044] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1317.031317] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a983dec-f7a7-49e5-bbeb-f04a65f064bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.039401] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1317.039592] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1317.040617] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6638d0af-2f3c-42af-8196-46288aefbf54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.045545] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1317.045545] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523d22c0-b672-3031-11f5-23741a9f24a2" [ 1317.045545] env[69994]: _type = "Task" [ 1317.045545] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.053066] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523d22c0-b672-3031-11f5-23741a9f24a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.193779] env[69994]: DEBUG oslo_vmware.api [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926605, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139855} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.194077] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1317.194267] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1317.194445] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1317.220534] env[69994]: INFO nova.scheduler.client.report [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleted allocations for instance 94169894-f772-41c9-95a1-ddf622f2c9f6 [ 1317.246048] env[69994]: DEBUG nova.compute.manager [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1317.248322] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "refresh_cache-aefb7903-afd1-4574-bec1-adab769728b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1317.248609] env[69994]: DEBUG nova.compute.manager [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Instance network_info: |[{"id": "f7b1d9c4-f85a-4f93-a48a-87a59a84831b", "address": "fa:16:3e:24:3f:e1", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7b1d9c4-f8", "ovs_interfaceid": "f7b1d9c4-f85a-4f93-a48a-87a59a84831b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1317.249250] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:3f:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f762954-6ca5-4da5-bf0a-5d31c51ec570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7b1d9c4-f85a-4f93-a48a-87a59a84831b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1317.256964] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Creating folder: Project (2c3dcb9ec62247adb210b83c9de8bf96). Parent ref: group-v587342. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1317.257990] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea8cc0c4-ba74-4e1f-9f30-85c79f665bb1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.270193] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Created folder: Project (2c3dcb9ec62247adb210b83c9de8bf96) in parent group-v587342. [ 1317.270409] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Creating folder: Instances. Parent ref: group-v587677. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1317.272585] env[69994]: DEBUG nova.virt.hardware [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1317.272807] env[69994]: DEBUG nova.virt.hardware [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1317.272963] env[69994]: DEBUG nova.virt.hardware [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1317.273212] env[69994]: DEBUG nova.virt.hardware [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1317.273370] env[69994]: DEBUG nova.virt.hardware [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1317.273520] env[69994]: DEBUG nova.virt.hardware [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1317.273726] env[69994]: DEBUG nova.virt.hardware [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1317.273885] env[69994]: DEBUG nova.virt.hardware [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1317.274072] env[69994]: DEBUG nova.virt.hardware [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1317.274250] env[69994]: DEBUG nova.virt.hardware [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1317.274416] env[69994]: DEBUG nova.virt.hardware [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1317.274672] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b4ff304-c661-4fa1-9a21-3fb9645efbdc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.276820] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11c8d87-c6c6-4308-ab77-996d318cd385 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.285275] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aca8f3f-87c1-485c-bede-92cc569e56b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.289909] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Created folder: Instances in parent group-v587677. [ 1317.290159] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1317.290690] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1317.290903] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29b3f13f-7c8c-43a9-840b-c77cccfc0502 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.318293] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1317.318293] env[69994]: value = "task-2926608" [ 1317.318293] env[69994]: _type = "Task" [ 1317.318293] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.325871] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926608, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.438926] env[69994]: DEBUG nova.compute.manager [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Received event network-changed-f7b1d9c4-f85a-4f93-a48a-87a59a84831b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1317.439106] env[69994]: DEBUG nova.compute.manager [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Refreshing instance network info cache due to event network-changed-f7b1d9c4-f85a-4f93-a48a-87a59a84831b. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1317.439328] env[69994]: DEBUG oslo_concurrency.lockutils [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] Acquiring lock "refresh_cache-aefb7903-afd1-4574-bec1-adab769728b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.439847] env[69994]: DEBUG oslo_concurrency.lockutils [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] Acquired lock "refresh_cache-aefb7903-afd1-4574-bec1-adab769728b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1317.440045] env[69994]: DEBUG nova.network.neutron [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Refreshing network info cache for port f7b1d9c4-f85a-4f93-a48a-87a59a84831b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1317.555437] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]523d22c0-b672-3031-11f5-23741a9f24a2, 'name': SearchDatastore_Task, 'duration_secs': 0.008062} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.556250] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-804aff1f-dca6-4287-9dcb-a7e0bb392d92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.561417] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1317.561417] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a19387-51dd-1065-9f47-a33181879ec2" [ 1317.561417] env[69994]: _type = "Task" [ 1317.561417] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.569077] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a19387-51dd-1065-9f47-a33181879ec2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.724859] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.725199] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.725430] env[69994]: DEBUG nova.objects.instance [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lazy-loading 'resources' on Instance uuid 94169894-f772-41c9-95a1-ddf622f2c9f6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1317.828860] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926608, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.874040] env[69994]: DEBUG nova.network.neutron [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Successfully updated port: 30d5769c-ab0c-4501-b426-4747886e04e6 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1318.072501] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52a19387-51dd-1065-9f47-a33181879ec2, 'name': SearchDatastore_Task, 'duration_secs': 0.008775} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.072845] env[69994]: DEBUG oslo_concurrency.lockutils [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1318.073011] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 328868f0-2fe9-4c04-a669-54b073c53b14/328868f0-2fe9-4c04-a669-54b073c53b14.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1318.073315] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad9c2f8b-113f-495b-a98b-31bddd2b0606 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.080525] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1318.080525] env[69994]: value = "task-2926609" [ 1318.080525] env[69994]: _type = "Task" [ 1318.080525] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.088791] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926609, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.226357] env[69994]: DEBUG nova.network.neutron [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Updated VIF entry in instance network info cache for port f7b1d9c4-f85a-4f93-a48a-87a59a84831b. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1318.226851] env[69994]: DEBUG nova.network.neutron [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Updating instance_info_cache with network_info: [{"id": "f7b1d9c4-f85a-4f93-a48a-87a59a84831b", "address": "fa:16:3e:24:3f:e1", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7b1d9c4-f8", "ovs_interfaceid": "f7b1d9c4-f85a-4f93-a48a-87a59a84831b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.228625] env[69994]: DEBUG nova.objects.instance [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lazy-loading 'numa_topology' on Instance uuid 94169894-f772-41c9-95a1-ddf622f2c9f6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1318.331028] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926608, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.376472] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "refresh_cache-27d2bf57-80ec-4bc4-b87b-560f7dfd6524" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1318.376651] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "refresh_cache-27d2bf57-80ec-4bc4-b87b-560f7dfd6524" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1318.376851] env[69994]: DEBUG nova.network.neutron [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1318.591300] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926609, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.447991} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.591607] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 328868f0-2fe9-4c04-a669-54b073c53b14/328868f0-2fe9-4c04-a669-54b073c53b14.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1318.591841] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1318.592166] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-14b5d270-95bc-4078-b9e0-ec4e087976f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.598582] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1318.598582] env[69994]: value = "task-2926610" [ 1318.598582] env[69994]: _type = "Task" [ 1318.598582] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.607251] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926610, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.731423] env[69994]: DEBUG oslo_concurrency.lockutils [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] Releasing lock "refresh_cache-aefb7903-afd1-4574-bec1-adab769728b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1318.731808] env[69994]: DEBUG nova.compute.manager [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Received event network-vif-unplugged-6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1318.732104] env[69994]: DEBUG oslo_concurrency.lockutils [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] Acquiring lock "94169894-f772-41c9-95a1-ddf622f2c9f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1318.732408] env[69994]: DEBUG oslo_concurrency.lockutils [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1318.732644] env[69994]: DEBUG oslo_concurrency.lockutils [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.732874] env[69994]: DEBUG nova.compute.manager [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] No waiting events found dispatching network-vif-unplugged-6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1318.733154] env[69994]: WARNING nova.compute.manager [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Received unexpected event network-vif-unplugged-6645c1c7-f316-403a-98aa-8b2cca92f8e4 for instance with vm_state shelved_offloaded and task_state None. 
[ 1318.733432] env[69994]: DEBUG nova.compute.manager [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Received event network-changed-6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1318.733786] env[69994]: DEBUG nova.compute.manager [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Refreshing instance network info cache due to event network-changed-6645c1c7-f316-403a-98aa-8b2cca92f8e4. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1318.734094] env[69994]: DEBUG oslo_concurrency.lockutils [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] Acquiring lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1318.734685] env[69994]: DEBUG oslo_concurrency.lockutils [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] Acquired lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1318.734895] env[69994]: DEBUG nova.network.neutron [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Refreshing network info cache for port 6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1318.736653] env[69994]: DEBUG nova.objects.base [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Object Instance<94169894-f772-41c9-95a1-ddf622f2c9f6> lazy-loaded attributes: resources,numa_topology {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1318.831124] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926608, 'name': CreateVM_Task, 'duration_secs': 1.42344} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.833769] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1318.834672] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1318.834886] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1318.835168] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1318.835425] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cba8866-0206-4c48-9e30-0caadf3a63ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.839731] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1318.839731] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5225f9a6-bc0e-f213-ade6-34ca845e14f6" [ 1318.839731] env[69994]: _type = "Task" [ 1318.839731] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.850692] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5225f9a6-bc0e-f213-ade6-34ca845e14f6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.855371] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f72be9-a2be-4cd4-869a-54f9539e1c91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.862012] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca91b0b-819a-4497-96ba-566bd1335ea8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.896284] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916c9737-d000-4b5e-8166-d54bcfc393b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.903802] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa71e8e-2a9b-4f9d-beda-4f59f20ee700 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.917430] env[69994]: DEBUG nova.compute.provider_tree [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.927303] env[69994]: DEBUG nova.network.neutron [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1319.083618] env[69994]: DEBUG nova.network.neutron [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Updating instance_info_cache with network_info: [{"id": "30d5769c-ab0c-4501-b426-4747886e04e6", "address": "fa:16:3e:f2:02:65", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30d5769c-ab", "ovs_interfaceid": "30d5769c-ab0c-4501-b426-4747886e04e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.109101] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926610, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066377} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.109395] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1319.110243] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb3982e-5eae-4fa5-9300-98c600bf341c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.132048] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 328868f0-2fe9-4c04-a669-54b073c53b14/328868f0-2fe9-4c04-a669-54b073c53b14.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1319.132402] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fb27c56-8263-41c7-9238-d0b1fc64ef9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.152903] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1319.152903] env[69994]: value = "task-2926611" [ 1319.152903] env[69994]: _type = "Task" [ 1319.152903] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.160645] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926611, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.350078] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5225f9a6-bc0e-f213-ade6-34ca845e14f6, 'name': SearchDatastore_Task, 'duration_secs': 0.010346} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.350449] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1319.350780] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1319.351104] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.351304] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1319.351542] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1319.352132] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccd69d8c-a9d7-4372-9826-9c8429a3d99e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.360757] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1319.360930] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1319.361663] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a51c58a-90ce-4608-a9e4-a9dbf23a0cad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.369038] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1319.369038] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52fdafd2-3d43-3d7b-ad3d-cf61c7aeebc4" [ 1319.369038] env[69994]: _type = "Task" [ 1319.369038] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.376971] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52fdafd2-3d43-3d7b-ad3d-cf61c7aeebc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.420856] env[69994]: DEBUG nova.scheduler.client.report [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1319.586935] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "refresh_cache-27d2bf57-80ec-4bc4-b87b-560f7dfd6524" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1319.587439] env[69994]: DEBUG nova.compute.manager [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Instance network_info: |[{"id": "30d5769c-ab0c-4501-b426-4747886e04e6", "address": "fa:16:3e:f2:02:65", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30d5769c-ab", "ovs_interfaceid": "30d5769c-ab0c-4501-b426-4747886e04e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1319.588017] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:02:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f762954-6ca5-4da5-bf0a-5d31c51ec570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30d5769c-ab0c-4501-b426-4747886e04e6', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1319.596530] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1319.596769] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1319.597012] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbd8937c-cc40-4e3f-bb4a-cdceae08d169 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.617621] env[69994]: DEBUG nova.compute.manager [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Received event network-vif-plugged-30d5769c-ab0c-4501-b426-4747886e04e6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1319.617621] env[69994]: DEBUG oslo_concurrency.lockutils [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] Acquiring lock "27d2bf57-80ec-4bc4-b87b-560f7dfd6524-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.617842] env[69994]: DEBUG oslo_concurrency.lockutils [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] Lock "27d2bf57-80ec-4bc4-b87b-560f7dfd6524-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.617944] env[69994]: DEBUG oslo_concurrency.lockutils [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] Lock "27d2bf57-80ec-4bc4-b87b-560f7dfd6524-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.618098] env[69994]: DEBUG nova.compute.manager [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] No waiting events found dispatching network-vif-plugged-30d5769c-ab0c-4501-b426-4747886e04e6 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1319.618290] env[69994]: WARNING nova.compute.manager [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Received unexpected event network-vif-plugged-30d5769c-ab0c-4501-b426-4747886e04e6 for instance with vm_state building and task_state spawning. [ 1319.618411] env[69994]: DEBUG nova.compute.manager [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Received event network-changed-30d5769c-ab0c-4501-b426-4747886e04e6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1319.618553] env[69994]: DEBUG nova.compute.manager [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Refreshing instance network info cache due to event network-changed-30d5769c-ab0c-4501-b426-4747886e04e6. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1319.618728] env[69994]: DEBUG oslo_concurrency.lockutils [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] Acquiring lock "refresh_cache-27d2bf57-80ec-4bc4-b87b-560f7dfd6524" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.618858] env[69994]: DEBUG oslo_concurrency.lockutils [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] Acquired lock "refresh_cache-27d2bf57-80ec-4bc4-b87b-560f7dfd6524" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1319.619016] env[69994]: DEBUG nova.network.neutron [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Refreshing network info cache for port 30d5769c-ab0c-4501-b426-4747886e04e6 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1319.624024] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1319.624024] env[69994]: value = "task-2926612" [ 1319.624024] env[69994]: _type = "Task" [ 1319.624024] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.633466] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926612, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.664435] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926611, 'name': ReconfigVM_Task, 'duration_secs': 0.281265} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.664690] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 328868f0-2fe9-4c04-a669-54b073c53b14/328868f0-2fe9-4c04-a669-54b073c53b14.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1319.665598] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bffffe3e-722a-44e1-9f24-d120eb015c48 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.672389] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1319.672389] env[69994]: value = "task-2926613" [ 1319.672389] env[69994]: _type = "Task" [ 1319.672389] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.680687] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926613, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.764692] env[69994]: DEBUG nova.network.neutron [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Updated VIF entry in instance network info cache for port 6645c1c7-f316-403a-98aa-8b2cca92f8e4. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1319.765271] env[69994]: DEBUG nova.network.neutron [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Updating instance_info_cache with network_info: [{"id": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "address": "fa:16:3e:98:5a:72", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap6645c1c7-f3", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.836379] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "94169894-f772-41c9-95a1-ddf622f2c9f6" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.880722] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52fdafd2-3d43-3d7b-ad3d-cf61c7aeebc4, 'name': SearchDatastore_Task, 'duration_secs': 0.008972} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.881486] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f11f98e5-d32a-4555-a537-93e1f8843411 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.887588] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1319.887588] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529e0e63-2723-6420-eb29-85a9b99e4829" [ 1319.887588] env[69994]: _type = "Task" [ 1319.887588] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.896825] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529e0e63-2723-6420-eb29-85a9b99e4829, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.925597] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.200s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.138260] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926612, 'name': CreateVM_Task, 'duration_secs': 0.366065} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.138260] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1320.138583] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.138761] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1320.139089] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1320.139413] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c865d04f-f088-448d-b247-9015233866ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.144725] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1320.144725] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528bcf74-deab-ec5c-bf02-e37273a83dcb" [ 1320.144725] env[69994]: _type = "Task" [ 1320.144725] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.154088] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528bcf74-deab-ec5c-bf02-e37273a83dcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.184036] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926613, 'name': Rename_Task, 'duration_secs': 0.173353} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.184333] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1320.184593] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8094a1e-8a54-421a-99c9-63591e0f5042 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.190540] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1320.190540] env[69994]: value = "task-2926614" [ 1320.190540] env[69994]: _type = "Task" [ 1320.190540] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.197792] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926614, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.271332] env[69994]: DEBUG oslo_concurrency.lockutils [req-29edfcf8-30e6-45d7-bcf7-e0955afc7457 req-66362aff-1a3d-4e18-aa82-c239c024bb88 service nova] Releasing lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1320.287854] env[69994]: DEBUG oslo_vmware.rw_handles [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5295a49a-d103-c84b-9619-85606ae6e699/disk-0.vmdk. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1320.289818] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3f5850-94cd-47d1-86ce-d78a63e41c61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.296191] env[69994]: DEBUG oslo_vmware.rw_handles [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5295a49a-d103-c84b-9619-85606ae6e699/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1320.296720] env[69994]: ERROR oslo_vmware.rw_handles [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5295a49a-d103-c84b-9619-85606ae6e699/disk-0.vmdk due to incomplete transfer. [ 1320.297030] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b214ea8f-8449-4616-80cb-40ca4e9553e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.307287] env[69994]: DEBUG oslo_vmware.rw_handles [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5295a49a-d103-c84b-9619-85606ae6e699/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1320.307546] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Uploaded image 57b3393e-1b45-4b81-8d0b-45ac7731565f to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1320.309942] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1320.310381] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f3bc0e89-376f-4433-a2f8-a2df395baef3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.315479] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1320.315479] env[69994]: value = "task-2926615" [ 1320.315479] env[69994]: _type = "Task" [ 1320.315479] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.323472] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926615, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.359968] env[69994]: DEBUG nova.network.neutron [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Updated VIF entry in instance network info cache for port 30d5769c-ab0c-4501-b426-4747886e04e6. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1320.360376] env[69994]: DEBUG nova.network.neutron [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Updating instance_info_cache with network_info: [{"id": "30d5769c-ab0c-4501-b426-4747886e04e6", "address": "fa:16:3e:f2:02:65", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30d5769c-ab", "ovs_interfaceid": "30d5769c-ab0c-4501-b426-4747886e04e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.397973] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529e0e63-2723-6420-eb29-85a9b99e4829, 'name': SearchDatastore_Task, 'duration_secs': 0.009877} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.398239] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1320.398530] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] aefb7903-afd1-4574-bec1-adab769728b5/aefb7903-afd1-4574-bec1-adab769728b5.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1320.398786] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bf45f65f-6989-425b-adb2-ce57adc3993e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.404823] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1320.404823] env[69994]: value = "task-2926616" [ 1320.404823] env[69994]: _type = "Task" [ 1320.404823] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.412291] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926616, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.432630] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7eb9f2d1-8455-4248-827a-d0552f87d830 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.736s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.433495] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.597s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.433677] env[69994]: INFO nova.compute.manager [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Unshelving [ 1320.654955] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]528bcf74-deab-ec5c-bf02-e37273a83dcb, 'name': SearchDatastore_Task, 'duration_secs': 0.009871} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.655299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1320.655572] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1320.655813] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.655960] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1320.656158] env[69994]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1320.656441] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7398691e-1ad8-4a51-b160-f1e1f0680675 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.670658] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1320.670934] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1320.671726] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5650099b-5113-474b-abe2-828b43496aa2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.678890] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1320.678890] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52783ffc-e1f8-5de0-eb9c-ab99f98e8cee" [ 1320.678890] env[69994]: _type = "Task" [ 1320.678890] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.689281] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52783ffc-e1f8-5de0-eb9c-ab99f98e8cee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.700182] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926614, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.825264] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926615, 'name': Destroy_Task, 'duration_secs': 0.361957} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.825546] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Destroyed the VM [ 1320.825789] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1320.826051] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-78cd8fd6-e7d5-44d7-bd7d-c6389ce3bbeb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.831944] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1320.831944] env[69994]: value = "task-2926617" [ 1320.831944] env[69994]: _type = "Task" [ 1320.831944] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.839244] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926617, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.862890] env[69994]: DEBUG oslo_concurrency.lockutils [req-bda995c7-d7c1-453f-93a9-e10d576ac8b6 req-e5c4ef60-0659-42c0-8145-837fdf073105 service nova] Releasing lock "refresh_cache-27d2bf57-80ec-4bc4-b87b-560f7dfd6524" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1320.914195] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926616, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.434214} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.914443] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] aefb7903-afd1-4574-bec1-adab769728b5/aefb7903-afd1-4574-bec1-adab769728b5.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1320.914627] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1320.914877] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc8d2407-e1fe-4a9b-bc20-d7ef743d12fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.921604] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1320.921604] env[69994]: value = "task-2926618" [ 1320.921604] env[69994]: _type = "Task" [ 1320.921604] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.929876] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926618, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.190328] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52783ffc-e1f8-5de0-eb9c-ab99f98e8cee, 'name': SearchDatastore_Task, 'duration_secs': 0.048286} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.191120] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be85bed2-dade-4118-b96d-26d5f948e0a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.198414] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1321.198414] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520b48dd-255d-2b42-1271-a0f748274dfc" [ 1321.198414] env[69994]: _type = "Task" [ 1321.198414] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.201565] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926614, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.209460] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520b48dd-255d-2b42-1271-a0f748274dfc, 'name': SearchDatastore_Task, 'duration_secs': 0.008657} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.209687] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1321.209933] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 27d2bf57-80ec-4bc4-b87b-560f7dfd6524/27d2bf57-80ec-4bc4-b87b-560f7dfd6524.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1321.210198] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0400edd7-7178-43ac-9388-615dcb6affb6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.215770] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1321.215770] env[69994]: value = "task-2926619" [ 1321.215770] env[69994]: _type = "Task" [ 1321.215770] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.222612] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926619, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.341963] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926617, 'name': RemoveSnapshot_Task, 'duration_secs': 0.342094} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.342363] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1321.342654] env[69994]: DEBUG nova.compute.manager [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1321.343494] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dace0d9e-dd8d-41f6-a32b-c3d6c6323adb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.431721] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926618, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062562} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.432046] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1321.432912] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e6a781a-6dc8-4353-a406-ed785798a5e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.460155] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] aefb7903-afd1-4574-bec1-adab769728b5/aefb7903-afd1-4574-bec1-adab769728b5.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1321.460929] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7fd0909-2dd7-475e-af8e-94fdd339fdfe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.477778] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1321.478099] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 
tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1321.478336] env[69994]: DEBUG nova.objects.instance [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lazy-loading 'pci_requests' on Instance uuid 94169894-f772-41c9-95a1-ddf622f2c9f6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1321.482874] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1321.482874] env[69994]: value = "task-2926620" [ 1321.482874] env[69994]: _type = "Task" [ 1321.482874] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.492451] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926620, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.702490] env[69994]: DEBUG oslo_vmware.api [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926614, 'name': PowerOnVM_Task, 'duration_secs': 1.151232} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.702766] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1321.702981] env[69994]: DEBUG nova.compute.manager [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1321.703755] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38011f83-f877-42c5-a41a-7f73ff71d106 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.723900] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926619, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469806} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.724181] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 27d2bf57-80ec-4bc4-b87b-560f7dfd6524/27d2bf57-80ec-4bc4-b87b-560f7dfd6524.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1321.724546] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1321.724842] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3abdf9cc-98fd-4b85-8994-e327e396ce12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.730918] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1321.730918] env[69994]: value = "task-2926621" [ 1321.730918] env[69994]: _type = "Task" [ 1321.730918] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.738674] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926621, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.856026] env[69994]: INFO nova.compute.manager [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Shelve offloading [ 1321.982978] env[69994]: DEBUG nova.objects.instance [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lazy-loading 'numa_topology' on Instance uuid 94169894-f772-41c9-95a1-ddf622f2c9f6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1321.995853] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926620, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.218589] env[69994]: DEBUG oslo_concurrency.lockutils [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.240204] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926621, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067334} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.240465] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1322.241223] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf16ddd2-6f6f-4f3e-8536-9494ca9d369d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.265998] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 27d2bf57-80ec-4bc4-b87b-560f7dfd6524/27d2bf57-80ec-4bc4-b87b-560f7dfd6524.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1322.266254] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f86a670-d60d-4504-ad22-e5b75dbe0e4a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.285392] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1322.285392] env[69994]: value = "task-2926622" [ 1322.285392] env[69994]: _type = "Task" [ 1322.285392] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.293371] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926622, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.359597] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1322.359927] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7f5e574-134a-416c-926f-25b18088866a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.366520] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1322.366520] env[69994]: value = "task-2926623" [ 1322.366520] env[69994]: _type = "Task" [ 1322.366520] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.374077] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926623, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.488252] env[69994]: INFO nova.compute.claims [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1322.496042] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926620, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.795994] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926622, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.877392] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1322.877621] env[69994]: DEBUG nova.compute.manager [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1322.878482] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce69c7e-a791-4a1a-80bb-3b791fbae8a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.884515] env[69994]: DEBUG oslo_concurrency.lockutils [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.884665] env[69994]: DEBUG oslo_concurrency.lockutils [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1322.884850] env[69994]: DEBUG nova.network.neutron [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1322.995831] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926620, 'name': ReconfigVM_Task, 'duration_secs': 1.261931} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.996291] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Reconfigured VM instance instance-0000007b to attach disk [datastore1] aefb7903-afd1-4574-bec1-adab769728b5/aefb7903-afd1-4574-bec1-adab769728b5.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1322.996865] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-811f647c-d1af-4a21-8305-3458a3b152cc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.003730] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1323.003730] env[69994]: value = "task-2926624" [ 1323.003730] env[69994]: _type = "Task" [ 1323.003730] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.014380] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926624, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.296424] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926622, 'name': ReconfigVM_Task, 'duration_secs': 0.576082} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.296721] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 27d2bf57-80ec-4bc4-b87b-560f7dfd6524/27d2bf57-80ec-4bc4-b87b-560f7dfd6524.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1323.297375] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a7f24c0-0003-4a07-8121-8d2dee103f01 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.303795] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1323.303795] env[69994]: value = "task-2926625" [ 1323.303795] env[69994]: _type = "Task" [ 1323.303795] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.312732] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926625, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.513258] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926624, 'name': Rename_Task, 'duration_secs': 0.130233} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.513515] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1323.513752] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ee67e3d-4730-46d5-8398-7b558d60d3c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.522618] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1323.522618] env[69994]: value = "task-2926626" [ 1323.522618] env[69994]: _type = "Task" [ 1323.522618] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.531324] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926626, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.595034] env[69994]: DEBUG nova.network.neutron [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Updating instance_info_cache with network_info: [{"id": "090b588e-3c97-4b85-b16b-0a1f4c7e4b18", "address": "fa:16:3e:bf:22:a2", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap090b588e-3c", "ovs_interfaceid": "090b588e-3c97-4b85-b16b-0a1f4c7e4b18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.614227] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530585e2-01fe-42d1-be24-bd6cd2a8e766 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.621738] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd3dcfd-4220-4603-8da3-d8e503da2369 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.653254] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c025517-c10a-4127-8446-1efbcf600135 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.661449] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2be8037-ff37-427d-9900-d25a0f389e25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.675256] env[69994]: DEBUG nova.compute.provider_tree [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1323.814859] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926625, 'name': Rename_Task, 'duration_secs': 0.127311} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.815050] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1323.815225] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5d0d32f-1711-4fb1-a348-ccbe4f81ffba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.822444] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1323.822444] env[69994]: value = "task-2926627" [ 1323.822444] env[69994]: _type = "Task" [ 1323.822444] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.833046] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926627, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.032697] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926626, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.098815] env[69994]: DEBUG oslo_concurrency.lockutils [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1324.179300] env[69994]: DEBUG nova.scheduler.client.report [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1324.336567] env[69994]: DEBUG oslo_vmware.api [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926627, 'name': PowerOnVM_Task, 'duration_secs': 0.457895} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.336567] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1324.336567] env[69994]: INFO nova.compute.manager [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Took 7.09 seconds to spawn the instance on the hypervisor. [ 1324.336567] env[69994]: DEBUG nova.compute.manager [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1324.336567] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0ca413-98d6-4668-84cd-84e53160b3f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.351638] env[69994]: DEBUG nova.compute.manager [req-52cccaee-93f8-4540-a527-143dd8f26d70 req-b74c7f0e-2c56-45f2-8c8f-38a558ed6987 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Received event network-vif-unplugged-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1324.351854] env[69994]: DEBUG oslo_concurrency.lockutils [req-52cccaee-93f8-4540-a527-143dd8f26d70 req-b74c7f0e-2c56-45f2-8c8f-38a558ed6987 service nova] Acquiring lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1324.352095] env[69994]: DEBUG oslo_concurrency.lockutils [req-52cccaee-93f8-4540-a527-143dd8f26d70 req-b74c7f0e-2c56-45f2-8c8f-38a558ed6987 service nova] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1324.352327] env[69994]: DEBUG oslo_concurrency.lockutils [req-52cccaee-93f8-4540-a527-143dd8f26d70 req-b74c7f0e-2c56-45f2-8c8f-38a558ed6987 service nova] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1324.352572] env[69994]: DEBUG nova.compute.manager [req-52cccaee-93f8-4540-a527-143dd8f26d70 req-b74c7f0e-2c56-45f2-8c8f-38a558ed6987 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] No waiting events found dispatching network-vif-unplugged-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1324.352797] env[69994]: WARNING nova.compute.manager [req-52cccaee-93f8-4540-a527-143dd8f26d70 req-b74c7f0e-2c56-45f2-8c8f-38a558ed6987 service nova] [instance: 
ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Received unexpected event network-vif-unplugged-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 for instance with vm_state shelved and task_state shelving_offloading. [ 1324.493036] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1324.493970] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580ec9e3-17ef-4d0c-b539-a7b9dd78b3ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.501348] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1324.501561] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9b4b932-b810-4189-9e14-0b8641daaa33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.530868] env[69994]: DEBUG oslo_vmware.api [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926626, 'name': PowerOnVM_Task, 'duration_secs': 0.697551} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.531133] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1324.531337] env[69994]: INFO nova.compute.manager [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Took 9.52 seconds to spawn the instance on the hypervisor. 
[ 1324.531519] env[69994]: DEBUG nova.compute.manager [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1324.532288] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a146bd-c7c8-4091-b97e-c9ba85b76b97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.572252] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1324.572531] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1324.572736] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleting the datastore file [datastore2] ead967bc-ba1d-4c3c-8dbb-e284b444ffcd {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1324.573281] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b30b8e1-69de-4545-ab33-e78bc936dec5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.579201] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1324.579201] env[69994]: value = "task-2926629" [ 1324.579201] env[69994]: _type = "Task" [ 1324.579201] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.587491] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926629, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.686931] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.209s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1324.689448] env[69994]: DEBUG oslo_concurrency.lockutils [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.471s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1324.689651] env[69994]: DEBUG nova.objects.instance [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1324.720184] env[69994]: INFO nova.network.neutron [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Updating port 6645c1c7-f316-403a-98aa-8b2cca92f8e4 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1324.852330] env[69994]: INFO nova.compute.manager [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Took 13.78 seconds to build instance. [ 1325.046900] env[69994]: INFO nova.compute.manager [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Took 14.30 seconds to build instance. [ 1325.088959] env[69994]: DEBUG oslo_vmware.api [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926629, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129876} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.089245] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1325.089451] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1325.089638] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1325.112983] env[69994]: INFO nova.scheduler.client.report [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleted allocations for instance ead967bc-ba1d-4c3c-8dbb-e284b444ffcd [ 1325.357657] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c22b7a95-c06a-4618-a19f-55d3414f2cf5 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "27d2bf57-80ec-4bc4-b87b-560f7dfd6524" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.294s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1325.358057] env[69994]: INFO nova.compute.manager [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Rescuing [ 1325.358297] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "refresh_cache-27d2bf57-80ec-4bc4-b87b-560f7dfd6524" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.358484] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "refresh_cache-27d2bf57-80ec-4bc4-b87b-560f7dfd6524" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1325.358656] env[69994]: DEBUG nova.network.neutron [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1325.548753] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc006b51-a03d-4149-85a0-23847aea35d1 tempest-ServerRescueNegativeTestJSON-392299606 
tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "aefb7903-afd1-4574-bec1-adab769728b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.812s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1325.617327] env[69994]: DEBUG oslo_concurrency.lockutils [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1325.698307] env[69994]: DEBUG oslo_concurrency.lockutils [None req-280af919-61be-4f9a-b218-e79caa0f344e tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1325.699456] env[69994]: DEBUG oslo_concurrency.lockutils [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.082s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1325.699688] env[69994]: DEBUG nova.objects.instance [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'resources' on Instance uuid ead967bc-ba1d-4c3c-8dbb-e284b444ffcd {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1326.045241] env[69994]: DEBUG nova.network.neutron [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Updating instance_info_cache with network_info: [{"id": "30d5769c-ab0c-4501-b426-4747886e04e6", "address": "fa:16:3e:f2:02:65", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30d5769c-ab", "ovs_interfaceid": "30d5769c-ab0c-4501-b426-4747886e04e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1326.203916] env[69994]: DEBUG nova.objects.instance [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'numa_topology' on Instance uuid ead967bc-ba1d-4c3c-8dbb-e284b444ffcd {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1326.225668] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.225668] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1326.225668] env[69994]: DEBUG nova.network.neutron [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1326.381132] env[69994]: DEBUG nova.compute.manager [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Received event network-changed-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1326.381352] env[69994]: DEBUG nova.compute.manager [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Refreshing instance network info cache due to event network-changed-090b588e-3c97-4b85-b16b-0a1f4c7e4b18. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1326.381625] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] Acquiring lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.381700] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] Acquired lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1326.381867] env[69994]: DEBUG nova.network.neutron [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Refreshing network info cache for port 090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1326.547929] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "refresh_cache-27d2bf57-80ec-4bc4-b87b-560f7dfd6524" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1326.707052] env[69994]: DEBUG nova.objects.base [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1326.818144] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7664b0c-c27b-4713-b9c6-5a963e832c68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.827023] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3354f3b-6750-4324-863d-d24d680e0972 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.858789] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896db096-3823-4c6f-a91f-1878094d1def {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.867050] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1b7b79-5cdd-4ed5-9c05-fdc0da08eaac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.880961] env[69994]: DEBUG nova.compute.provider_tree [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.023315] env[69994]: DEBUG nova.network.neutron [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 
94169894-f772-41c9-95a1-ddf622f2c9f6] Updating instance_info_cache with network_info: [{"id": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "address": "fa:16:3e:98:5a:72", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6645c1c7-f3", "ovs_interfaceid": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.088230] env[69994]: DEBUG nova.network.neutron [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Updated VIF entry in instance network info cache for port 090b588e-3c97-4b85-b16b-0a1f4c7e4b18. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1327.088600] env[69994]: DEBUG nova.network.neutron [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Updating instance_info_cache with network_info: [{"id": "090b588e-3c97-4b85-b16b-0a1f4c7e4b18", "address": "fa:16:3e:bf:22:a2", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": null, "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap090b588e-3c", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.387970] env[69994]: DEBUG nova.scheduler.client.report [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1327.525956] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1327.552603] env[69994]: DEBUG nova.virt.hardware [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='4aadc565b4aaeabb0932b6d0b21ac213',container_format='bare',created_at=2025-03-11T12:41:59Z,direct_url=,disk_format='vmdk',id=f87a1822-23bb-48ba-a487-e222570db3f4,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-2107617140-shelved',owner='1a10b55bcc104c108604d402ec6d09ce',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-03-11T12:42:13Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1327.552834] env[69994]: DEBUG nova.virt.hardware [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1327.552991] env[69994]: DEBUG nova.virt.hardware [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1327.553190] env[69994]: DEBUG nova.virt.hardware [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1327.553339] env[69994]: DEBUG nova.virt.hardware [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1327.553484] env[69994]: DEBUG nova.virt.hardware [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1327.553688] env[69994]: 
DEBUG nova.virt.hardware [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1327.553937] env[69994]: DEBUG nova.virt.hardware [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1327.554138] env[69994]: DEBUG nova.virt.hardware [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1327.554309] env[69994]: DEBUG nova.virt.hardware [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1327.554514] env[69994]: DEBUG nova.virt.hardware [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1327.555360] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5b2c67-ebd5-4935-86ff-5d9eca26efcd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.563729] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c007fc58-2c59-479e-bd15-69a2de1e3463 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.576402] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:5a:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52c1f5eb-3d4a-4faa-a30d-2b0a46430791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6645c1c7-f316-403a-98aa-8b2cca92f8e4', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1327.583782] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1327.585491] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1327.585725] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9096f4a-079a-4613-aed1-a00c6056d986 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.601802] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] Releasing lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1327.602058] env[69994]: DEBUG nova.compute.manager [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Received event network-vif-plugged-6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1327.602254] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] Acquiring lock "94169894-f772-41c9-95a1-ddf622f2c9f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1327.602456] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1327.602616] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1327.602778] env[69994]: DEBUG nova.compute.manager [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] No waiting events found dispatching network-vif-plugged-6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1327.602941] env[69994]: WARNING nova.compute.manager [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Received unexpected event network-vif-plugged-6645c1c7-f316-403a-98aa-8b2cca92f8e4 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1327.603117] env[69994]: DEBUG nova.compute.manager [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Received event network-changed-6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1327.603272] env[69994]: DEBUG nova.compute.manager [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Refreshing instance network info cache due to event network-changed-6645c1c7-f316-403a-98aa-8b2cca92f8e4. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1327.603452] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] Acquiring lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.603587] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] Acquired lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1327.603740] env[69994]: DEBUG nova.network.neutron [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Refreshing network info cache for port 6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1327.610760] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1327.610760] env[69994]: value = "task-2926630" [ 1327.610760] env[69994]: _type = "Task" [ 1327.610760] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.619163] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926630, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.688750] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1327.892594] env[69994]: DEBUG oslo_concurrency.lockutils [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.193s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1328.106217] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1328.108841] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47c16cff-8b85-4108-bbf0-c2bf5a8a5013 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.117751] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1328.117751] env[69994]: value = "task-2926631" [ 1328.117751] env[69994]: _type = "Task" [ 1328.117751] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.120921] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926630, 'name': CreateVM_Task, 'duration_secs': 0.344675} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.123872] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1328.124528] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f87a1822-23bb-48ba-a487-e222570db3f4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.124696] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f87a1822-23bb-48ba-a487-e222570db3f4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1328.125080] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f87a1822-23bb-48ba-a487-e222570db3f4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1328.125634] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-275a9db3-dba4-4d84-8461-01c480f4bb61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.130141] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926631, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.132876] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1328.132876] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520f0eb8-cadf-2255-a461-7e739f47c5ae" [ 1328.132876] env[69994]: _type = "Task" [ 1328.132876] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.142369] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520f0eb8-cadf-2255-a461-7e739f47c5ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.402218] env[69994]: DEBUG oslo_concurrency.lockutils [None req-474dea03-cc36-43c3-8727-c63476422941 tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.183s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1328.402633] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.714s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1328.402633] env[69994]: INFO nova.compute.manager [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Unshelving [ 1328.455159] env[69994]: DEBUG nova.network.neutron [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Updated VIF entry in instance network info cache for port 6645c1c7-f316-403a-98aa-8b2cca92f8e4. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1328.455523] env[69994]: DEBUG nova.network.neutron [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Updating instance_info_cache with network_info: [{"id": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "address": "fa:16:3e:98:5a:72", "network": {"id": "97acfd06-1881-4a91-9c7d-b3527a698b7b", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1729163136-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a10b55bcc104c108604d402ec6d09ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52c1f5eb-3d4a-4faa-a30d-2b0a46430791", "external-id": "nsx-vlan-transportzone-775", "segmentation_id": 775, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6645c1c7-f3", "ovs_interfaceid": "6645c1c7-f316-403a-98aa-8b2cca92f8e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.631901] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926631, 'name': PowerOffVM_Task, 'duration_secs': 
0.328202} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.632236] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1328.633143] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68476240-4757-4cd0-aaaf-db1d10d6851e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.659434] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f87a1822-23bb-48ba-a487-e222570db3f4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1328.659762] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Processing image f87a1822-23bb-48ba-a487-e222570db3f4 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1328.660055] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f87a1822-23bb-48ba-a487-e222570db3f4/f87a1822-23bb-48ba-a487-e222570db3f4.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.660255] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f87a1822-23bb-48ba-a487-e222570db3f4/f87a1822-23bb-48ba-a487-e222570db3f4.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1328.660462] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1328.660844] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13e2d1aa-5897-4d22-aa67-67d7d424559c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.663398] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578ca491-f5a1-4021-9e98-b951aab1b2de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.682155] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 
tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1328.682371] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1328.683231] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4db0619a-e413-483b-bdd5-4cf03d76a2d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.689745] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1328.689745] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529736a3-f6c9-9c65-8210-311f8aa7521e" [ 1328.689745] env[69994]: _type = "Task" [ 1328.689745] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.697268] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1328.697548] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d02b3408-46b9-44cb-ab3c-edec2fed0404 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.702192] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529736a3-f6c9-9c65-8210-311f8aa7521e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.707218] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1328.707218] env[69994]: value = "task-2926632" [ 1328.707218] env[69994]: _type = "Task" [ 1328.707218] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.715241] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926632, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.958764] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffa9101c-c51c-4da7-86f9-f7ed2050a0e7 req-7479e397-5c7c-4230-abbb-59eaca7a96d6 service nova] Releasing lock "refresh_cache-94169894-f772-41c9-95a1-ddf622f2c9f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1329.200991] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1329.201288] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Fetch image to [datastore2] OSTACK_IMG_f73501cb-6122-442f-8c38-dd917995de65/OSTACK_IMG_f73501cb-6122-442f-8c38-dd917995de65.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1329.201462] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Downloading stream optimized image f87a1822-23bb-48ba-a487-e222570db3f4 to [datastore2] OSTACK_IMG_f73501cb-6122-442f-8c38-dd917995de65/OSTACK_IMG_f73501cb-6122-442f-8c38-dd917995de65.vmdk on the data store datastore2 as vApp {{(pid=69994) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1329.201713] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Downloading image file data f87a1822-23bb-48ba-a487-e222570db3f4 to the ESX as VM named 'OSTACK_IMG_f73501cb-6122-442f-8c38-dd917995de65' {{(pid=69994) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1329.216730] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1329.216942] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1329.217211] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.217362] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1329.217547] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1329.217907] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1627278c-044f-449f-a34d-f070845f5580 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.244204] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1329.244412] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1329.245128] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a068d41f-8e12-4e60-811f-4db26d99ec33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.249998] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1329.249998] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522afe8b-683b-b433-6a40-9902bea84400" [ 1329.249998] env[69994]: _type = "Task" [ 1329.249998] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.257455] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522afe8b-683b-b433-6a40-9902bea84400, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.278084] env[69994]: DEBUG oslo_vmware.rw_handles [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1329.278084] env[69994]: value = "resgroup-9" [ 1329.278084] env[69994]: _type = "ResourcePool" [ 1329.278084] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1329.278360] env[69994]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-612a5494-1cee-49b8-88ed-525cbc55d8af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.298334] env[69994]: DEBUG oslo_vmware.rw_handles [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lease: (returnval){ [ 1329.298334] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e258c2-a47e-dcb4-3bc9-0fa19641f003" [ 1329.298334] env[69994]: _type = "HttpNfcLease" [ 1329.298334] env[69994]: } obtained for vApp import into resource pool (val){ [ 1329.298334] env[69994]: value = "resgroup-9" [ 1329.298334] env[69994]: _type = "ResourcePool" [ 1329.298334] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1329.298683] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the lease: (returnval){ [ 1329.298683] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e258c2-a47e-dcb4-3bc9-0fa19641f003" [ 1329.298683] env[69994]: _type = "HttpNfcLease" [ 1329.298683] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1329.305493] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1329.305493] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e258c2-a47e-dcb4-3bc9-0fa19641f003" [ 1329.305493] env[69994]: _type = "HttpNfcLease" [ 1329.305493] env[69994]: } is initializing. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1329.429758] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.430139] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.430315] env[69994]: DEBUG nova.objects.instance [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'pci_requests' on Instance uuid ead967bc-ba1d-4c3c-8dbb-e284b444ffcd {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1329.761015] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522afe8b-683b-b433-6a40-9902bea84400, 'name': SearchDatastore_Task, 'duration_secs': 0.008428} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.761782] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fff49dce-e680-4427-b0de-dca4b62eac91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.766820] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1329.766820] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520f915f-c1b7-8c07-19de-d21bb3f73a7e" [ 1329.766820] env[69994]: _type = "Task" [ 1329.766820] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.774376] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520f915f-c1b7-8c07-19de-d21bb3f73a7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.806659] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1329.806659] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e258c2-a47e-dcb4-3bc9-0fa19641f003" [ 1329.806659] env[69994]: _type = "HttpNfcLease" [ 1329.806659] env[69994]: } is initializing. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1329.934106] env[69994]: DEBUG nova.objects.instance [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'numa_topology' on Instance uuid ead967bc-ba1d-4c3c-8dbb-e284b444ffcd {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1330.277490] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520f915f-c1b7-8c07-19de-d21bb3f73a7e, 'name': SearchDatastore_Task, 'duration_secs': 0.010872} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.277779] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1330.277993] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 27d2bf57-80ec-4bc4-b87b-560f7dfd6524/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk. {{(pid=69994) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1330.278262] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6bbcefb2-e541-4d10-95a2-c59cb27bee8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.285167] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1330.285167] env[69994]: value = "task-2926634" [ 1330.285167] env[69994]: _type = "Task" [ 1330.285167] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.292851] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.306318] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1330.306318] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e258c2-a47e-dcb4-3bc9-0fa19641f003" [ 1330.306318] env[69994]: _type = "HttpNfcLease" [ 1330.306318] env[69994]: } is ready. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1330.306619] env[69994]: DEBUG oslo_vmware.rw_handles [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1330.306619] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e258c2-a47e-dcb4-3bc9-0fa19641f003" [ 1330.306619] env[69994]: _type = "HttpNfcLease" [ 1330.306619] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1330.307389] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc8b9db-2ae6-4258-918a-647dbe2e71d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.314487] env[69994]: DEBUG oslo_vmware.rw_handles [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5256af34-0570-628c-cd88-85a311c85f14/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1330.314670] env[69994]: DEBUG oslo_vmware.rw_handles [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5256af34-0570-628c-cd88-85a311c85f14/disk-0.vmdk. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1330.382356] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6a2ec547-1cc2-4192-8033-d4bad68ca183 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.437085] env[69994]: INFO nova.compute.claims [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1330.794744] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926634, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.438809} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.795037] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 27d2bf57-80ec-4bc4-b87b-560f7dfd6524/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk. 
[ 1330.795822] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c750550-094c-44e0-b424-a0b31dc07136 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.820023] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 27d2bf57-80ec-4bc4-b87b-560f7dfd6524/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1330.820328] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95f0a666-b90c-4d76-9ec4-fea82a4aac58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.838771] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1330.838771] env[69994]: value = "task-2926635" [ 1330.838771] env[69994]: _type = "Task" [ 1330.838771] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.846662] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926635, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.351446] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926635, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.570513] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc6c13a-fae9-433c-aaaa-c16b6c55ebff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.578239] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda11d5b-533a-4fc6-bbb0-5eab64d41ab5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.608212] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715d9e0f-6d03-4c6f-83d9-bc39f07bc2a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.618107] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a47819e-4971-47b9-bbe1-dc85064e036b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.635647] env[69994]: DEBUG nova.compute.provider_tree [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.852501] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926635, 'name': ReconfigVM_Task, 'duration_secs': 0.523157} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.854121] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 27d2bf57-80ec-4bc4-b87b-560f7dfd6524/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1331.855074] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ee7057-e8b2-4f3a-91e9-07b990778547 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.884882] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b72187cb-3e2c-4c87-9fc0-83334f3693b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.904369] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1331.904369] env[69994]: value = "task-2926636" [ 1331.904369] env[69994]: _type = "Task" [ 1331.904369] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.909955] env[69994]: DEBUG oslo_vmware.rw_handles [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Completed reading data from the image iterator. {{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1331.910190] env[69994]: DEBUG oslo_vmware.rw_handles [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5256af34-0570-628c-cd88-85a311c85f14/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1331.910979] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10f08bd-7345-4661-b812-51176e0d75fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.918337] env[69994]: DEBUG oslo_vmware.rw_handles [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5256af34-0570-628c-cd88-85a311c85f14/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1331.918496] env[69994]: DEBUG oslo_vmware.rw_handles [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5256af34-0570-628c-cd88-85a311c85f14/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1331.921966] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-260b9c21-37ec-43da-bdac-e78775d00b8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.923710] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926636, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.141798] env[69994]: DEBUG nova.scheduler.client.report [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1332.178522] env[69994]: DEBUG oslo_vmware.rw_handles [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5256af34-0570-628c-cd88-85a311c85f14/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1332.178833] env[69994]: INFO nova.virt.vmwareapi.images [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Downloaded image file data f87a1822-23bb-48ba-a487-e222570db3f4 [ 1332.179785] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb332054-0b52-48c2-85e2-e7e8cd6ffe58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.197849] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cc6426e-8c00-4e58-a06a-21b629d44b85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.230927] env[69994]: INFO nova.virt.vmwareapi.images [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] The imported VM was unregistered [ 1332.233686] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1332.233911] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Creating directory with path [datastore2] devstack-image-cache_base/f87a1822-23bb-48ba-a487-e222570db3f4 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1332.234201] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d46831b-c3bb-4ca0-b474-30a805178e70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.246221] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Created directory with path [datastore2] devstack-image-cache_base/f87a1822-23bb-48ba-a487-e222570db3f4 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1332.246406] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_f73501cb-6122-442f-8c38-dd917995de65/OSTACK_IMG_f73501cb-6122-442f-8c38-dd917995de65.vmdk to [datastore2] devstack-image-cache_base/f87a1822-23bb-48ba-a487-e222570db3f4/f87a1822-23bb-48ba-a487-e222570db3f4.vmdk. {{(pid=69994) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1332.246635] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-7e656bc5-2d82-49d6-b76d-0353b419115e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.253566] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1332.253566] env[69994]: value = "task-2926638" [ 1332.253566] env[69994]: _type = "Task" [ 1332.253566] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.261017] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926638, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.415137] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926636, 'name': ReconfigVM_Task, 'duration_secs': 0.245377} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.415464] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1332.415690] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33aa3830-48d3-4208-8ce6-cab741ed8b83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.423433] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1332.423433] env[69994]: value = "task-2926639" [ 1332.423433] env[69994]: _type = "Task" [ 1332.423433] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.432933] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926639, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.647415] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.217s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1332.678846] env[69994]: INFO nova.network.neutron [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Updating port 090b588e-3c97-4b85-b16b-0a1f4c7e4b18 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1332.765402] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926638, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.934638] env[69994]: DEBUG oslo_vmware.api [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926639, 'name': PowerOnVM_Task, 'duration_secs': 0.474391} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.934836] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1332.937823] env[69994]: DEBUG nova.compute.manager [None req-d92c5bc0-55b3-49d7-a9ab-a044be6654f7 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1332.938718] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8175bb3-6e51-4f95-b71f-5cf458e13ec7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.265295] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926638, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.765976] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926638, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.206697] env[69994]: DEBUG nova.compute.manager [req-5702b2c1-89c5-4dec-9eaf-d16681c31a49 req-8113119e-03f1-4534-8e51-33d469e0654e service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Received event network-vif-plugged-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1334.207298] env[69994]: DEBUG oslo_concurrency.lockutils [req-5702b2c1-89c5-4dec-9eaf-d16681c31a49 req-8113119e-03f1-4534-8e51-33d469e0654e service nova] Acquiring lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.208419] env[69994]: DEBUG oslo_concurrency.lockutils [req-5702b2c1-89c5-4dec-9eaf-d16681c31a49 req-8113119e-03f1-4534-8e51-33d469e0654e service nova] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.208897] env[69994]: DEBUG oslo_concurrency.lockutils [req-5702b2c1-89c5-4dec-9eaf-d16681c31a49 req-8113119e-03f1-4534-8e51-33d469e0654e service nova] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1334.209357] env[69994]: DEBUG nova.compute.manager [req-5702b2c1-89c5-4dec-9eaf-d16681c31a49 req-8113119e-03f1-4534-8e51-33d469e0654e service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] No waiting events found dispatching network-vif-plugged-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1334.209829] env[69994]: WARNING nova.compute.manager [req-5702b2c1-89c5-4dec-9eaf-d16681c31a49 req-8113119e-03f1-4534-8e51-33d469e0654e service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Received unexpected event network-vif-plugged-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 for instance with vm_state shelved_offloaded and task_state spawning. [ 1334.270880] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926638, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.318027] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.318027] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1334.318027] env[69994]: DEBUG nova.network.neutron [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1334.768114] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926638, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.396286} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.768114] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_f73501cb-6122-442f-8c38-dd917995de65/OSTACK_IMG_f73501cb-6122-442f-8c38-dd917995de65.vmdk to [datastore2] devstack-image-cache_base/f87a1822-23bb-48ba-a487-e222570db3f4/f87a1822-23bb-48ba-a487-e222570db3f4.vmdk. 
[ 1334.768114] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Cleaning up location [datastore2] OSTACK_IMG_f73501cb-6122-442f-8c38-dd917995de65 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1334.768639] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_f73501cb-6122-442f-8c38-dd917995de65 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1334.768639] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f736ad23-bd6f-49a4-bc22-d9fc31ea1438 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.775926] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1334.775926] env[69994]: value = "task-2926640" [ 1334.775926] env[69994]: _type = "Task" [ 1334.775926] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.784349] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926640, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.286623] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926640, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.048208} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.289138] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1335.289322] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f87a1822-23bb-48ba-a487-e222570db3f4/f87a1822-23bb-48ba-a487-e222570db3f4.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1335.289557] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f87a1822-23bb-48ba-a487-e222570db3f4/f87a1822-23bb-48ba-a487-e222570db3f4.vmdk to [datastore2] 94169894-f772-41c9-95a1-ddf622f2c9f6/94169894-f772-41c9-95a1-ddf622f2c9f6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1335.289831] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7aad7eb-6783-4c2d-a66a-b5e90126fe5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.298664] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1335.298664] env[69994]: value = "task-2926641" [ 1335.298664] env[69994]: _type = "Task" [ 1335.298664] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.307678] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926641, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.324489] env[69994]: DEBUG nova.network.neutron [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Updating instance_info_cache with network_info: [{"id": "090b588e-3c97-4b85-b16b-0a1f4c7e4b18", "address": "fa:16:3e:bf:22:a2", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap090b588e-3c", "ovs_interfaceid": "090b588e-3c97-4b85-b16b-0a1f4c7e4b18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.809780] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926641, 'name': CopyVirtualDisk_Task} progress is 18%. 
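Annotation: the instance_info_cache entry logged above is a plain list of VIF dicts with a nested network/subnets/ips/floating_ips layout. Purely to illustrate that shape (this helper is not Nova code), the fixed and floating addresses can be pulled out like this:

    def addresses(network_info):
        # network_info: list of VIF dicts as logged above.
        found = []
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    found.append(('fixed', ip['address']))
                    for fip in ip.get('floating_ips', []):
                        found.append(('floating', fip['address']))
        return found

    # For the cache entry above this yields
    # [('fixed', '192.168.128.12'), ('floating', '10.180.180.207')].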
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.827756] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1335.856525] env[69994]: DEBUG nova.virt.hardware [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='729106f1ad24c5734d5719a1a5569d10',container_format='bare',created_at=2025-03-11T12:42:08Z,direct_url=,disk_format='vmdk',id=57b3393e-1b45-4b81-8d0b-45ac7731565f,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1152087824-shelved',owner='c545eb835008401ab8672be30dbcdad9',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-03-11T12:42:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1335.856857] env[69994]: DEBUG nova.virt.hardware [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1335.856938] env[69994]: DEBUG nova.virt.hardware [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1335.857122] env[69994]: DEBUG nova.virt.hardware [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1335.857268] env[69994]: DEBUG nova.virt.hardware [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1335.857407] env[69994]: DEBUG nova.virt.hardware [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1335.857684] env[69994]: DEBUG nova.virt.hardware [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1335.857862] env[69994]: DEBUG nova.virt.hardware [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1335.858048] env[69994]: DEBUG nova.virt.hardware [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1335.858220] env[69994]: DEBUG nova.virt.hardware [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1335.858397] env[69994]: DEBUG nova.virt.hardware [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1335.859372] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a372ba-2198-4c78-b22b-e41ad3212009 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.869944] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a33ec4-3cfb-42e8-9bec-c645a138829d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.885535] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:22:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '090b588e-3c97-4b85-b16b-0a1f4c7e4b18', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1335.893540] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
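Annotation: the hardware.py lines above walk the standard CPU-topology search: with no flavor or image limits (0:0:0) and ceilings of 65536 sockets/cores/threads, a 1-vCPU m1.nano admits exactly one candidate, 1 socket x 1 core x 1 thread. A rough, self-contained illustration of that enumeration (not the actual Nova implementation):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) combinations whose product
        # equals the vCPU count, capped by the configured maxima.
        topologies = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        topologies.append((s, c, t))
        return topologies

    print(possible_topologies(1))   # -> [(1, 1, 1)], matching the log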
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1335.893858] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1335.894149] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dcfa051f-601e-4576-ae22-aca86dc317bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.914844] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1335.914844] env[69994]: value = "task-2926642" [ 1335.914844] env[69994]: _type = "Task" [ 1335.914844] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.923683] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.075644] env[69994]: INFO nova.compute.manager [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Rescuing [ 1336.075932] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "refresh_cache-aefb7903-afd1-4574-bec1-adab769728b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.076111] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "refresh_cache-aefb7903-afd1-4574-bec1-adab769728b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1336.076280] env[69994]: DEBUG nova.network.neutron [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1336.232101] env[69994]: DEBUG nova.compute.manager [req-613baa2b-4301-4313-a62f-53effc993f03 req-86159760-90a7-4bfd-bbe8-0d05a01ab5c3 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Received event network-changed-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1336.232369] env[69994]: DEBUG nova.compute.manager [req-613baa2b-4301-4313-a62f-53effc993f03 req-86159760-90a7-4bfd-bbe8-0d05a01ab5c3 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Refreshing instance network info cache due to event network-changed-090b588e-3c97-4b85-b16b-0a1f4c7e4b18. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1336.232616] env[69994]: DEBUG oslo_concurrency.lockutils [req-613baa2b-4301-4313-a62f-53effc993f03 req-86159760-90a7-4bfd-bbe8-0d05a01ab5c3 service nova] Acquiring lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.232758] env[69994]: DEBUG oslo_concurrency.lockutils [req-613baa2b-4301-4313-a62f-53effc993f03 req-86159760-90a7-4bfd-bbe8-0d05a01ab5c3 service nova] Acquired lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1336.232918] env[69994]: DEBUG nova.network.neutron [req-613baa2b-4301-4313-a62f-53effc993f03 req-86159760-90a7-4bfd-bbe8-0d05a01ab5c3 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Refreshing network info cache for port 090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1336.309518] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926641, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.424931] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.758104] env[69994]: DEBUG nova.network.neutron [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Updating instance_info_cache with network_info: [{"id": "f7b1d9c4-f85a-4f93-a48a-87a59a84831b", "address": "fa:16:3e:24:3f:e1", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7b1d9c4-f8", "ovs_interfaceid": "f7b1d9c4-f85a-4f93-a48a-87a59a84831b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.810013] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926641, 'name': CopyVirtualDisk_Task} 
progress is 32%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.925775] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.953689] env[69994]: DEBUG nova.network.neutron [req-613baa2b-4301-4313-a62f-53effc993f03 req-86159760-90a7-4bfd-bbe8-0d05a01ab5c3 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Updated VIF entry in instance network info cache for port 090b588e-3c97-4b85-b16b-0a1f4c7e4b18. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1336.954060] env[69994]: DEBUG nova.network.neutron [req-613baa2b-4301-4313-a62f-53effc993f03 req-86159760-90a7-4bfd-bbe8-0d05a01ab5c3 service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Updating instance_info_cache with network_info: [{"id": "090b588e-3c97-4b85-b16b-0a1f4c7e4b18", "address": "fa:16:3e:bf:22:a2", "network": {"id": "52420129-9683-4450-b7fb-f1d51ec8c3d0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2136801708-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c545eb835008401ab8672be30dbcdad9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap090b588e-3c", "ovs_interfaceid": "090b588e-3c97-4b85-b16b-0a1f4c7e4b18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.261022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "refresh_cache-aefb7903-afd1-4574-bec1-adab769728b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1337.311787] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926641, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.428566] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. 
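Annotation: the Acquiring/Acquired/Releasing lines around "refresh_cache-<instance uuid>" come from oslo.concurrency's named locks, which serialize network-info cache refreshes per instance (both the tempest request threads and the external-event handler contend for the same name). A minimal sketch, assuming a callable `build_cache` stands in for the actual Neutron query; the lock name format mirrors the log:

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid, build_cache):
        # Entering the context manager emits the "Acquiring"/"Acquired"
        # DEBUG lines above; leaving it emits the "Releasing" line.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return build_cache(instance_uuid)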
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.456432] env[69994]: DEBUG oslo_concurrency.lockutils [req-613baa2b-4301-4313-a62f-53effc993f03 req-86159760-90a7-4bfd-bbe8-0d05a01ab5c3 service nova] Releasing lock "refresh_cache-ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1337.811557] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926641, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.927684] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.312270] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926641, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.428227] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.792317] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1338.792693] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19988918-6410-4989-93d0-cb878d324890 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.802161] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1338.802161] env[69994]: value = "task-2926643" [ 1338.802161] env[69994]: _type = "Task" [ 1338.802161] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.815037] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926643, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.818587] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926641, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.932991] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.317576] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926641, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.321314] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926643, 'name': PowerOffVM_Task, 'duration_secs': 0.440958} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.321660] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1339.322667] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1cc5bb-8342-4312-a9ed-684ef45d4b48 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.349260] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434213e5-0f44-425e-83d4-efe25deb164f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.366921] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1339.367276] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.367558] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1339.367815] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 
tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.368039] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.373074] env[69994]: INFO nova.compute.manager [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Terminating instance [ 1339.385858] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1339.386617] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d4bd27b-5eed-4e57-8706-c0564dcb35b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.396322] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1339.396322] env[69994]: value = "task-2926644" [ 1339.396322] env[69994]: _type = "Task" [ 1339.396322] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.409519] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1339.409722] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1339.409978] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.410147] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1339.410410] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1339.410708] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d2d4d2f-1ef2-469b-888c-b9626058fd92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.424284] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1339.424586] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1339.428560] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e9f09ab-b2c6-427a-b9e7-b829a70bc3dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.430991] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. 
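Annotation: at vm_util.py:1519 the second power-off attempt for aefb7903 is reported as "VM already powered off" instead of failing: the rescue flow tolerates the InvalidPowerState fault and carries on. A hedged sketch of that behaviour, assuming `session` and `vm_ref` are an oslo.vmware session and a VirtualMachine moref, and that oslo.vmware maps the fault to InvalidPowerStateException:

    from oslo_vmware import exceptions as vexc

    def power_off(session, vm_ref):
        try:
            task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
            session.wait_for_task(task)
        except vexc.InvalidPowerStateException:
            # Corresponds to the "VM already powered off" DEBUG line above.
            pass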
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.435726] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1339.435726] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e87ecc-328a-775d-6354-a4be4fe49373" [ 1339.435726] env[69994]: _type = "Task" [ 1339.435726] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.445023] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e87ecc-328a-775d-6354-a4be4fe49373, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.817600] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926641, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.877790] env[69994]: DEBUG nova.compute.manager [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1339.878059] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1339.878361] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-801a0b74-245f-44e8-b3cd-d6738571ecf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.886456] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1339.886456] env[69994]: value = "task-2926645" [ 1339.886456] env[69994]: _type = "Task" [ 1339.886456] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.895861] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926645, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.929019] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.946913] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e87ecc-328a-775d-6354-a4be4fe49373, 'name': SearchDatastore_Task, 'duration_secs': 0.014944} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.947779] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16794868-66d7-491f-ad54-804782c33db9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.954429] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1339.954429] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dd2f0a-ef3a-9291-c543-326a5d21659f" [ 1339.954429] env[69994]: _type = "Task" [ 1339.954429] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.964200] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dd2f0a-ef3a-9291-c543-326a5d21659f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.318384] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926641, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.397248] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926645, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.429729] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.464580] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52dd2f0a-ef3a-9291-c543-326a5d21659f, 'name': SearchDatastore_Task, 'duration_secs': 0.017081} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.464896] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1340.465166] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] aefb7903-afd1-4574-bec1-adab769728b5/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk. {{(pid=69994) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1340.465429] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e559e3b-9894-4f4e-9c8a-3a3cb00837a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.472191] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1340.472191] env[69994]: value = "task-2926646" [ 1340.472191] env[69994]: _type = "Task" [ 1340.472191] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.481269] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926646, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.819192] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926641, 'name': CopyVirtualDisk_Task, 'duration_secs': 5.107454} completed successfully. 
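Annotation: both CopyVirtualDisk_Task operations above (the image-cache-to-instance copy on datastore2 and the rescue-disk copy on datastore1) go through the VirtualDiskManager. A sketch of the call with placeholder source/destination paths, again assuming an oslo.vmware `session`; the keyword names follow the vSphere CopyVirtualDisk_Task parameters:

    def copy_virtual_disk(session, dc_ref, source_path, dest_path):
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                                  sourceName=source_path,
                                  sourceDatacenter=dc_ref,
                                  destName=dest_path)
        # Long copies surface as the slowly advancing
        # "CopyVirtualDisk_Task progress is N%" lines above.
        return session.wait_for_task(task)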
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.819680] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f87a1822-23bb-48ba-a487-e222570db3f4/f87a1822-23bb-48ba-a487-e222570db3f4.vmdk to [datastore2] 94169894-f772-41c9-95a1-ddf622f2c9f6/94169894-f772-41c9-95a1-ddf622f2c9f6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1340.820319] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65eab02-63a8-4ebc-b255-8d918f748375 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.843923] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 94169894-f772-41c9-95a1-ddf622f2c9f6/94169894-f772-41c9-95a1-ddf622f2c9f6.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1340.844282] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef86dd98-d050-4973-893e-441b5dcd506c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.867728] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1340.867728] env[69994]: value = "task-2926647" [ 1340.867728] env[69994]: _type = "Task" [ 1340.867728] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.877289] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926647, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.898658] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926645, 'name': PowerOffVM_Task, 'duration_secs': 0.559968} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.898947] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1340.899171] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1340.899374] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587667', 'volume_id': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'name': 'volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'ac72ed6c-15f7-47e3-83a0-abcd85bba128', 'attached_at': '2025-03-11T12:42:00.000000', 'detached_at': '', 'volume_id': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'serial': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1340.900206] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa692995-2a72-4da1-bb56-c7bc19345680 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.925176] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e097f703-23c8-447f-87c8-6ea6975349bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.935336] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.937869] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cfc2f97-fbc0-4728-8130-42f91fa7e3fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.956686] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d989f5c-e9da-42fe-b906-9ab238267533 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.973262] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] The volume has not been displaced from its original location: [datastore2] volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b/volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1340.978447] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1340.981530] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8f95e1c-7e85-4a76-9e86-2b36b90be1b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.001805] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926646, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484143} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.003027] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] aefb7903-afd1-4574-bec1-adab769728b5/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk. [ 1341.003388] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1341.003388] env[69994]: value = "task-2926648" [ 1341.003388] env[69994]: _type = "Task" [ 1341.003388] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.004064] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1894ed60-326e-49dd-a6f8-e574840e66db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.014206] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926648, 'name': ReconfigVM_Task} progress is 6%. 
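Annotation: detaching volume de4ada45-4c6a-4478-87c3-f23eb2bca97b from ac72ed6c is done by reconfiguring the VM to drop the disk device (the "Reconfiguring VM instance instance-0000007a to detach disk 2000" line) before any Cinder-side cleanup. A hedged sketch of that reconfigure, assuming the suds client factory exposed by the session and a `device` object already looked up from the VM's hardware list:

    def detach_disk(session, vm_ref, device):
        cf = session.vim.client.factory
        spec = cf.create('ns0:VirtualMachineConfigSpec')
        dev_change = cf.create('ns0:VirtualDeviceConfigSpec')
        dev_change.operation = 'remove'   # drop the device, keep the backing
        dev_change.device = device
        spec.deviceChange = [dev_change]
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=spec)
        # Shows up above as the ReconfigVM_Task progress / completion lines.
        session.wait_for_task(task)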
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.034378] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] aefb7903-afd1-4574-bec1-adab769728b5/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1341.034673] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f611cee-e6db-4d15-976f-f35d4840e481 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.055123] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1341.055123] env[69994]: value = "task-2926649" [ 1341.055123] env[69994]: _type = "Task" [ 1341.055123] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.063329] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926649, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.377886] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926647, 'name': ReconfigVM_Task, 'duration_secs': 0.342936} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.378228] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 94169894-f772-41c9-95a1-ddf622f2c9f6/94169894-f772-41c9-95a1-ddf622f2c9f6.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1341.378997] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9327f1e-8302-4a77-ab25-f9f510002fa9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.386013] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1341.386013] env[69994]: value = "task-2926650" [ 1341.386013] env[69994]: _type = "Task" [ 1341.386013] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.394181] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926650, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.430837] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.516785] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926648, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.565299] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926649, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.896907] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926650, 'name': Rename_Task, 'duration_secs': 0.148397} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.897327] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1341.897510] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f837e4bd-5164-43f4-a464-8151757025e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.904891] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1341.904891] env[69994]: value = "task-2926651" [ 1341.904891] env[69994]: _type = "Task" [ 1341.904891] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.921498] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926651, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.929207] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.018029] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926648, 'name': ReconfigVM_Task, 'duration_secs': 0.524227} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.018029] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1342.022964] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6cf3a8f3-9021-4e98-aa84-6ee7ec48657e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.038504] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1342.038504] env[69994]: value = "task-2926652" [ 1342.038504] env[69994]: _type = "Task" [ 1342.038504] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.046734] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926652, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.066038] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926649, 'name': ReconfigVM_Task, 'duration_secs': 0.681473} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.066038] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Reconfigured VM instance instance-0000007b to attach disk [datastore1] aefb7903-afd1-4574-bec1-adab769728b5/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1342.066663] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6270cb1-f6ce-4738-b316-1b5f1b445e15 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.093301] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-570733e9-53a0-468f-ab7e-217e920b2ede {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.111052] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1342.111052] env[69994]: value = "task-2926653" [ 1342.111052] env[69994]: _type = "Task" [ 1342.111052] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.120607] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926653, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.421541] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926651, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.431290] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.549333] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926652, 'name': ReconfigVM_Task, 'duration_secs': 0.12211} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.549762] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587667', 'volume_id': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'name': 'volume-de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'ac72ed6c-15f7-47e3-83a0-abcd85bba128', 'attached_at': '2025-03-11T12:42:00.000000', 'detached_at': '', 'volume_id': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b', 'serial': 'de4ada45-4c6a-4478-87c3-f23eb2bca97b'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1342.550105] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1342.550850] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83733e90-1ad9-423c-91cc-a212dbc01863 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.558565] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1342.558800] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4dc11dd9-d1e2-4f24-ba33-8b07b3124b2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.621440] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926653, 'name': ReconfigVM_Task, 'duration_secs': 0.165555} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.621727] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1342.621997] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22134c86-31c8-476d-915f-3946d7a80025 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.646222] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1342.646222] env[69994]: value = "task-2926655" [ 1342.646222] env[69994]: _type = "Task" [ 1342.646222] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.654553] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926655, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.919488] env[69994]: DEBUG oslo_vmware.api [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926651, 'name': PowerOnVM_Task, 'duration_secs': 0.519968} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.919868] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1342.935203] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.020805] env[69994]: DEBUG nova.compute.manager [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1343.021741] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d817ab44-44fb-4426-84e3-48becbb5a281 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.157996] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926655, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.435065] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.541574] env[69994]: DEBUG oslo_concurrency.lockutils [None req-578a2ef6-1952-4ffa-a4f2-df2c74fdce97 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 23.108s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.657391] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926655, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.935806] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.049083] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1344.049306] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1344.049536] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleting the datastore file [datastore2] ac72ed6c-15f7-47e3-83a0-abcd85bba128 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1344.049821] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f357475e-33d3-4812-991b-b42a115e7823 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.059868] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1344.059868] env[69994]: value = "task-2926656" [ 1344.059868] env[69994]: _type = "Task" [ 1344.059868] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.068884] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926656, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.160912] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926655, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.436402] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926642, 'name': CreateVM_Task, 'duration_secs': 8.19635} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.437530] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1344.437530] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/57b3393e-1b45-4b81-8d0b-45ac7731565f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.437530] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/57b3393e-1b45-4b81-8d0b-45ac7731565f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1344.438357] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/57b3393e-1b45-4b81-8d0b-45ac7731565f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1344.438672] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7fac9cb-2811-49c1-8eac-c0324950da98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.445043] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1344.445043] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52c2fc50-c948-f323-c7e3-9f782ad9e16c" [ 1344.445043] env[69994]: _type = "Task" [ 1344.445043] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.464156] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/57b3393e-1b45-4b81-8d0b-45ac7731565f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1344.464421] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Processing image 57b3393e-1b45-4b81-8d0b-45ac7731565f {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1344.464682] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/57b3393e-1b45-4b81-8d0b-45ac7731565f/57b3393e-1b45-4b81-8d0b-45ac7731565f.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.464807] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/57b3393e-1b45-4b81-8d0b-45ac7731565f/57b3393e-1b45-4b81-8d0b-45ac7731565f.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1344.464984] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1344.465267] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-730b03ac-fe54-4541-93ed-99f626f004fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.474531] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1344.474741] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1344.475469] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35502e10-8cb6-4d53-b4a7-32a36c9deb55 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.481973] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1344.481973] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522978a5-a567-727c-c18a-74a98a8bde12" [ 1344.481973] env[69994]: _type = "Task" [ 1344.481973] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.498623] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1344.498623] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Fetch image to [datastore2] OSTACK_IMG_bd50df93-b13d-43ff-9a03-f2d2894ec017/OSTACK_IMG_bd50df93-b13d-43ff-9a03-f2d2894ec017.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1344.498623] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Downloading stream optimized image 57b3393e-1b45-4b81-8d0b-45ac7731565f to [datastore2] OSTACK_IMG_bd50df93-b13d-43ff-9a03-f2d2894ec017/OSTACK_IMG_bd50df93-b13d-43ff-9a03-f2d2894ec017.vmdk on the data store datastore2 as vApp {{(pid=69994) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1344.498623] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Downloading image file data 57b3393e-1b45-4b81-8d0b-45ac7731565f to the ESX as VM named 'OSTACK_IMG_bd50df93-b13d-43ff-9a03-f2d2894ec017' {{(pid=69994) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1344.570553] env[69994]: DEBUG oslo_vmware.api [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926656, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096808} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.571429] env[69994]: DEBUG oslo_vmware.rw_handles [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1344.571429] env[69994]: value = "resgroup-9" [ 1344.571429] env[69994]: _type = "ResourcePool" [ 1344.571429] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1344.571703] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1344.571907] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1344.572134] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1344.572349] env[69994]: INFO nova.compute.manager [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Took 4.69 seconds to destroy the instance on the hypervisor. [ 1344.572681] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1344.572919] env[69994]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-c8f4e39e-d528-4702-8ff8-dbca75d4d105 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.588290] env[69994]: DEBUG nova.compute.manager [-] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1344.588428] env[69994]: DEBUG nova.network.neutron [-] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1344.597737] env[69994]: DEBUG oslo_vmware.rw_handles [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lease: (returnval){ [ 1344.597737] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a8d17-8197-dbf6-8726-c07951bc65d5" [ 1344.597737] env[69994]: _type = "HttpNfcLease" [ 1344.597737] env[69994]: } obtained for vApp import into resource pool (val){ [ 1344.597737] env[69994]: value = "resgroup-9" [ 1344.597737] env[69994]: _type = "ResourcePool" [ 1344.597737] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1344.598089] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the lease: (returnval){ [ 1344.598089] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a8d17-8197-dbf6-8726-c07951bc65d5" [ 1344.598089] env[69994]: _type = "HttpNfcLease" [ 1344.598089] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1344.604775] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1344.604775] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a8d17-8197-dbf6-8726-c07951bc65d5" [ 1344.604775] env[69994]: _type = "HttpNfcLease" [ 1344.604775] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1344.661097] env[69994]: DEBUG oslo_vmware.api [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926655, 'name': PowerOnVM_Task, 'duration_secs': 1.840646} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.661097] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1344.663435] env[69994]: DEBUG nova.compute.manager [None req-8883d8b0-87d3-4490-a642-3cb5f5185911 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1344.664248] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e69625-4741-4b99-8772-239a3982f86d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.073328] env[69994]: DEBUG nova.compute.manager [req-452ea003-80e9-4aa0-896f-772077e4dbac req-6bdf3d79-563b-430c-b41c-1c42fa2e9aec service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Received event network-vif-deleted-a983eff5-af52-4477-9645-db9812917bc7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1345.073649] env[69994]: INFO nova.compute.manager [req-452ea003-80e9-4aa0-896f-772077e4dbac req-6bdf3d79-563b-430c-b41c-1c42fa2e9aec service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Neutron deleted interface a983eff5-af52-4477-9645-db9812917bc7; detaching it from the instance and deleting it from the info cache [ 1345.073649] env[69994]: DEBUG nova.network.neutron [req-452ea003-80e9-4aa0-896f-772077e4dbac req-6bdf3d79-563b-430c-b41c-1c42fa2e9aec service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.108116] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1345.108116] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a8d17-8197-dbf6-8726-c07951bc65d5" [ 1345.108116] env[69994]: _type = "HttpNfcLease" [ 1345.108116] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1345.108617] env[69994]: DEBUG oslo_vmware.rw_handles [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1345.108617] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]529a8d17-8197-dbf6-8726-c07951bc65d5" [ 1345.108617] env[69994]: _type = "HttpNfcLease" [ 1345.108617] env[69994]: }. 
{{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1345.109825] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7407e66-ab8d-4a51-af31-f5c93158cfd4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.122509] env[69994]: DEBUG oslo_vmware.rw_handles [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f71575-f689-d7b2-f331-8d8066743099/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1345.122708] env[69994]: DEBUG oslo_vmware.rw_handles [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f71575-f689-d7b2-f331-8d8066743099/disk-0.vmdk. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1345.188099] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b861cda5-31c2-47a3-bc0e-721c7bdabf4d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.549488] env[69994]: DEBUG nova.network.neutron [-] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.577453] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1acd0092-74bf-467e-8cea-4799de183a79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.588847] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b79e238-4180-499a-bb8f-61854dfb0666 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.625093] env[69994]: DEBUG nova.compute.manager [req-452ea003-80e9-4aa0-896f-772077e4dbac req-6bdf3d79-563b-430c-b41c-1c42fa2e9aec service nova] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Detach interface failed, port_id=a983eff5-af52-4477-9645-db9812917bc7, reason: Instance ac72ed6c-15f7-47e3-83a0-abcd85bba128 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1346.051612] env[69994]: INFO nova.compute.manager [-] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Took 1.46 seconds to deallocate network for instance. [ 1346.254558] env[69994]: DEBUG oslo_vmware.rw_handles [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Completed reading data from the image iterator. 
{{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1346.254941] env[69994]: DEBUG oslo_vmware.rw_handles [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f71575-f689-d7b2-f331-8d8066743099/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1346.255867] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2329efee-fc0b-4cf3-b472-b55fe3eedb86 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.263436] env[69994]: DEBUG oslo_vmware.rw_handles [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f71575-f689-d7b2-f331-8d8066743099/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1346.263614] env[69994]: DEBUG oslo_vmware.rw_handles [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f71575-f689-d7b2-f331-8d8066743099/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1346.263824] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-9db9c51f-063e-4c31-b973-0967f1098095 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.283958] env[69994]: INFO nova.compute.manager [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Unrescuing [ 1346.284240] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "refresh_cache-aefb7903-afd1-4574-bec1-adab769728b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.284389] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "refresh_cache-aefb7903-afd1-4574-bec1-adab769728b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1346.284556] env[69994]: DEBUG nova.network.neutron [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1346.462846] env[69994]: DEBUG oslo_vmware.rw_handles [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 
tempest-ServerActionsTestOtherB-1022362737-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f71575-f689-d7b2-f331-8d8066743099/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1346.463118] env[69994]: INFO nova.virt.vmwareapi.images [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Downloaded image file data 57b3393e-1b45-4b81-8d0b-45ac7731565f [ 1346.464017] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a302502-35a5-49c4-8fab-1256792cc23a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.479380] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-613a4c15-381d-4aa3-b829-e00f1a4f26a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.506126] env[69994]: INFO nova.virt.vmwareapi.images [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] The imported VM was unregistered [ 1346.508863] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1346.509170] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Creating directory with path [datastore2] devstack-image-cache_base/57b3393e-1b45-4b81-8d0b-45ac7731565f {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1346.509535] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0db2cf96-bcfa-4f7d-a274-d9b89a2e4eb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.521680] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Created directory with path [datastore2] devstack-image-cache_base/57b3393e-1b45-4b81-8d0b-45ac7731565f {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1346.521900] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_bd50df93-b13d-43ff-9a03-f2d2894ec017/OSTACK_IMG_bd50df93-b13d-43ff-9a03-f2d2894ec017.vmdk to [datastore2] devstack-image-cache_base/57b3393e-1b45-4b81-8d0b-45ac7731565f/57b3393e-1b45-4b81-8d0b-45ac7731565f.vmdk. 
{{(pid=69994) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1346.522181] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-becf7880-5f66-4178-b9ab-5f9fa42f1a47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.529276] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1346.529276] env[69994]: value = "task-2926659" [ 1346.529276] env[69994]: _type = "Task" [ 1346.529276] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.537881] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926659, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.606928] env[69994]: INFO nova.compute.manager [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Took 0.55 seconds to detach 1 volumes for instance. [ 1346.609238] env[69994]: DEBUG nova.compute.manager [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Deleting volume: de4ada45-4c6a-4478-87c3-f23eb2bca97b {{(pid=69994) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1347.015786] env[69994]: DEBUG nova.network.neutron [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Updating instance_info_cache with network_info: [{"id": "f7b1d9c4-f85a-4f93-a48a-87a59a84831b", "address": "fa:16:3e:24:3f:e1", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7b1d9c4-f8", "ovs_interfaceid": "f7b1d9c4-f85a-4f93-a48a-87a59a84831b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.041226] env[69994]: DEBUG oslo_vmware.api [None 
req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926659, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.153259] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1347.153725] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1347.153940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1347.178322] env[69994]: INFO nova.scheduler.client.report [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleted allocations for instance ac72ed6c-15f7-47e3-83a0-abcd85bba128 [ 1347.518393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "refresh_cache-aefb7903-afd1-4574-bec1-adab769728b5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1347.519179] env[69994]: DEBUG nova.objects.instance [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lazy-loading 'flavor' on Instance uuid aefb7903-afd1-4574-bec1-adab769728b5 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1347.544017] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926659, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.688027] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8f3eada4-4f56-40ef-b431-5178e28a7445 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "ac72ed6c-15f7-47e3-83a0-abcd85bba128" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.320s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1348.025831] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10fc08c-abb7-48f4-aec0-10c4e213491d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.054057] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1348.054968] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98563d0f-9ba8-4980-9faf-b08f7a1bd35b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.062389] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926659, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.063897] env[69994]: DEBUG oslo_vmware.api [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1348.063897] env[69994]: value = "task-2926661" [ 1348.063897] env[69994]: _type = "Task" [ 1348.063897] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.074626] env[69994]: DEBUG oslo_vmware.api [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926661, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.380369] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "5784a102-fd07-4717-a88b-ac94ad578af6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1348.380672] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "5784a102-fd07-4717-a88b-ac94ad578af6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1348.380894] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "5784a102-fd07-4717-a88b-ac94ad578af6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1348.381098] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "5784a102-fd07-4717-a88b-ac94ad578af6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1348.381277] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "5784a102-fd07-4717-a88b-ac94ad578af6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1348.384099] env[69994]: INFO nova.compute.manager [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Terminating instance [ 1348.542531] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926659, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.574749] env[69994]: DEBUG oslo_vmware.api [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926661, 'name': PowerOffVM_Task, 'duration_secs': 0.384652} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.575065] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1348.580524] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Reconfiguring VM instance instance-0000007b to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1348.580864] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b667ba10-d722-4797-ad3f-affc7c6325d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.601804] env[69994]: DEBUG oslo_vmware.api [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1348.601804] env[69994]: value = "task-2926662" [ 1348.601804] env[69994]: _type = "Task" [ 1348.601804] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.611110] env[69994]: DEBUG oslo_vmware.api [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926662, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.888523] env[69994]: DEBUG nova.compute.manager [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1348.888759] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1348.889672] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0844cb56-642f-4d58-91c9-a50829cabf6e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.897792] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1348.898033] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bad24602-bd23-4f6e-b365-d79c60e3815f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.905068] env[69994]: DEBUG oslo_vmware.api [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1348.905068] env[69994]: value = "task-2926663" [ 1348.905068] env[69994]: _type = "Task" [ 1348.905068] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.916916] env[69994]: DEBUG oslo_vmware.api [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926663, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.043211] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926659, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.298067} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.043541] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_bd50df93-b13d-43ff-9a03-f2d2894ec017/OSTACK_IMG_bd50df93-b13d-43ff-9a03-f2d2894ec017.vmdk to [datastore2] devstack-image-cache_base/57b3393e-1b45-4b81-8d0b-45ac7731565f/57b3393e-1b45-4b81-8d0b-45ac7731565f.vmdk. 
[ 1349.043761] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Cleaning up location [datastore2] OSTACK_IMG_bd50df93-b13d-43ff-9a03-f2d2894ec017 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1349.043931] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_bd50df93-b13d-43ff-9a03-f2d2894ec017 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1349.044704] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3bca152d-d94d-4c87-b4b9-e0bb008d7c5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.053558] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1349.053558] env[69994]: value = "task-2926664" [ 1349.053558] env[69994]: _type = "Task" [ 1349.053558] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.062277] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926664, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.112544] env[69994]: DEBUG oslo_vmware.api [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926662, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.415459] env[69994]: DEBUG oslo_vmware.api [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926663, 'name': PowerOffVM_Task, 'duration_secs': 0.230496} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.415732] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1349.415905] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1349.416176] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27654f7e-5eef-4db4-a138-af26ba13f3f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.486025] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1349.486293] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1349.486461] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleting the datastore file [datastore1] 5784a102-fd07-4717-a88b-ac94ad578af6 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1349.486739] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a81da611-dd37-4661-816c-a729eefd33fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.493724] env[69994]: DEBUG oslo_vmware.api [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1349.493724] env[69994]: value = "task-2926666" [ 1349.493724] env[69994]: _type = "Task" [ 1349.493724] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.501687] env[69994]: DEBUG oslo_vmware.api [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926666, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.563203] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926664, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.059146} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.563633] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1349.563678] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/57b3393e-1b45-4b81-8d0b-45ac7731565f/57b3393e-1b45-4b81-8d0b-45ac7731565f.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1349.563906] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/57b3393e-1b45-4b81-8d0b-45ac7731565f/57b3393e-1b45-4b81-8d0b-45ac7731565f.vmdk to [datastore2] ead967bc-ba1d-4c3c-8dbb-e284b444ffcd/ead967bc-ba1d-4c3c-8dbb-e284b444ffcd.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1349.564190] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9822af2e-c954-4be3-9252-72a650bc2b47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.570768] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1349.570768] env[69994]: value = "task-2926667" [ 1349.570768] env[69994]: _type = "Task" [ 1349.570768] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.579048] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926667, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.612937] env[69994]: DEBUG oslo_vmware.api [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926662, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.005255] env[69994]: DEBUG oslo_vmware.api [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926666, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139803} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.005490] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1350.005687] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1350.005869] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1350.006059] env[69994]: INFO nova.compute.manager [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1350.006325] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1350.006539] env[69994]: DEBUG nova.compute.manager [-] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1350.006637] env[69994]: DEBUG nova.network.neutron [-] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1350.082260] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926667, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.114638] env[69994]: DEBUG oslo_vmware.api [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926662, 'name': ReconfigVM_Task, 'duration_secs': 1.30697} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.114949] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Reconfigured VM instance instance-0000007b to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1350.115167] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1350.115511] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa9946e6-8a0b-4e23-b8f1-240af1a72dfb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.125382] env[69994]: DEBUG oslo_vmware.api [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1350.125382] env[69994]: value = "task-2926668" [ 1350.125382] env[69994]: _type = "Task" [ 1350.125382] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.135426] env[69994]: DEBUG oslo_vmware.api [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926668, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.316763] env[69994]: DEBUG nova.compute.manager [req-42d5b038-8ea2-4708-960e-0a7bee51bb56 req-3c46c34a-d6b1-4f19-a705-2413087cf76a service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Received event network-vif-deleted-5881a64a-b640-4414-b3cd-35a42d39632b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1350.317081] env[69994]: INFO nova.compute.manager [req-42d5b038-8ea2-4708-960e-0a7bee51bb56 req-3c46c34a-d6b1-4f19-a705-2413087cf76a service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Neutron deleted interface 5881a64a-b640-4414-b3cd-35a42d39632b; detaching it from the instance and deleting it from the info cache [ 1350.317314] env[69994]: DEBUG nova.network.neutron [req-42d5b038-8ea2-4708-960e-0a7bee51bb56 req-3c46c34a-d6b1-4f19-a705-2413087cf76a service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.584368] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926667, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.636118] env[69994]: DEBUG oslo_vmware.api [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926668, 'name': PowerOnVM_Task, 'duration_secs': 0.504052} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.636538] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1350.636684] env[69994]: DEBUG nova.compute.manager [None req-4bfd1aec-d612-40f3-931a-10c8bcf8270e tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1350.637538] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74baa9a1-3942-40ca-a38f-5912a8a3ad98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.797285] env[69994]: DEBUG nova.network.neutron [-] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.820224] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f99fb99-0a3c-4e52-8e48-c424ca8bb1ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.833306] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed90d499-c7b9-4241-8a8c-8b33a6d11a6e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.867622] env[69994]: DEBUG nova.compute.manager [req-42d5b038-8ea2-4708-960e-0a7bee51bb56 req-3c46c34a-d6b1-4f19-a705-2413087cf76a service nova] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Detach interface failed, port_id=5881a64a-b640-4414-b3cd-35a42d39632b, reason: Instance 5784a102-fd07-4717-a88b-ac94ad578af6 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1351.083141] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926667, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.300875] env[69994]: INFO nova.compute.manager [-] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Took 1.29 seconds to deallocate network for instance. [ 1351.585060] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926667, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.806874] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1351.807234] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1351.807487] env[69994]: DEBUG nova.objects.instance [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lazy-loading 'resources' on Instance uuid 5784a102-fd07-4717-a88b-ac94ad578af6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1352.084294] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926667, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.493281} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.084589] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/57b3393e-1b45-4b81-8d0b-45ac7731565f/57b3393e-1b45-4b81-8d0b-45ac7731565f.vmdk to [datastore2] ead967bc-ba1d-4c3c-8dbb-e284b444ffcd/ead967bc-ba1d-4c3c-8dbb-e284b444ffcd.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1352.085410] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a9d888-9b6d-409a-81fc-1a96d84ec6b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.108614] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] ead967bc-ba1d-4c3c-8dbb-e284b444ffcd/ead967bc-ba1d-4c3c-8dbb-e284b444ffcd.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1352.108898] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3de3d25-76bb-4973-bb05-9ee4ea27e01b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.129127] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 
1352.129127] env[69994]: value = "task-2926669" [ 1352.129127] env[69994]: _type = "Task" [ 1352.129127] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.137252] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926669, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.418300] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5deebcbf-657c-4191-8ec1-25d8ce9aa447 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.426475] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57762833-bf30-4dae-ac5c-f69e7814b779 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.456465] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c43a0ac-fcb0-4316-af4c-1b6a2eec528c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.463915] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f162430-1b5e-4860-90af-171f89131bbe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.477010] env[69994]: DEBUG nova.compute.provider_tree [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1352.640440] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926669, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.983018] env[69994]: DEBUG nova.scheduler.client.report [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1353.144015] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926669, 'name': ReconfigVM_Task, 'duration_secs': 0.756563} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.144015] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Reconfigured VM instance instance-00000077 to attach disk [datastore2] ead967bc-ba1d-4c3c-8dbb-e284b444ffcd/ead967bc-ba1d-4c3c-8dbb-e284b444ffcd.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1353.144015] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b91e9ffb-436e-48dc-b2e4-c9517a6bff67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.151693] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1353.151693] env[69994]: value = "task-2926670" [ 1353.151693] env[69994]: _type = "Task" [ 1353.151693] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.165766] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926670, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.486964] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.680s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1353.504260] env[69994]: INFO nova.scheduler.client.report [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleted allocations for instance 5784a102-fd07-4717-a88b-ac94ad578af6 [ 1353.662651] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926670, 'name': Rename_Task, 'duration_secs': 0.440714} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.662955] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1353.663201] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2bf53224-5df9-4614-bb63-c4fdbabe5f08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.670248] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1353.670248] env[69994]: value = "task-2926671" [ 1353.670248] env[69994]: _type = "Task" [ 1353.670248] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.678798] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926671, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.013035] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2deedc-eabb-4f57-b014-e18f7f3a17e4 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "5784a102-fd07-4717-a88b-ac94ad578af6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.631s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1354.180375] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926671, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.271026] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1354.271213] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1354.661676] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.661938] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.662097] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.662258] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.662408] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.662620] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.662773] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.663052] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1354.663052] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.682194] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926671, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.773476] env[69994]: DEBUG nova.compute.manager [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1355.166417] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.166669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1355.166838] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1355.166994] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1355.168308] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f16158-dc8c-4e7e-8928-fd12b5d08893 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.178762] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb42356-45ed-4979-bd29-ceafeef35104 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.185149] env[69994]: DEBUG oslo_vmware.api [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926671, 'name': PowerOnVM_Task, 'duration_secs': 1.143452} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.185676] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1355.196758] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b606aa1c-7c8f-4d51-8374-404392616158 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.203196] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c319c914-6b08-4d31-bf29-33c4de8bfe23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.231612] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179695MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1355.231776] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.231966] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1355.287152] env[69994]: DEBUG nova.compute.manager [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1355.288324] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c73772a-d61a-4048-b672-566c0c2f5bcc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.298523] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.807901] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e38c7c3c-6c8a-42d5-93e4-e7bcc139f4bb tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 27.406s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.210642] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.211664] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.211664] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.211842] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.211914] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.214136] env[69994]: INFO nova.compute.manager [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Terminating instance [ 1356.259379] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 8001cb13-6a52-451b-b4b6-57b893975079 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.259540] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 29ea539a-d8f4-487b-b5e7-1f15534272f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.259666] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 328868f0-2fe9-4c04-a669-54b073c53b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.259791] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance aefb7903-afd1-4574-bec1-adab769728b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.259905] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 27d2bf57-80ec-4bc4-b87b-560f7dfd6524 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.260028] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 94169894-f772-41c9-95a1-ddf622f2c9f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.260145] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance ead967bc-ba1d-4c3c-8dbb-e284b444ffcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.718932] env[69994]: DEBUG nova.compute.manager [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1356.720584] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1356.721499] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b89143d-bdce-4003-a35e-154fa87d228d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.729511] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1356.729748] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1080e855-a5e4-4e01-87a9-7346950cd416 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.736109] env[69994]: DEBUG oslo_vmware.api [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1356.736109] env[69994]: value = "task-2926673" [ 1356.736109] env[69994]: _type = "Task" [ 1356.736109] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.744431] env[69994]: DEBUG oslo_vmware.api [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926673, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.763382] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 1c47c71a-65c7-4753-85e6-e0a6a93f08a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1356.763662] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1356.763747] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1356.866576] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40973c9-c282-4d00-807e-d76982ca6b8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.875399] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50e8a01-48df-4aa3-9982-bb5b81551c37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.906397] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b2ddb7-4b60-4e3a-93ca-98ee71c22f14 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.917658] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b155e5e-a46e-4dc6-a70b-f7dcc0861279 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.931403] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1357.246075] env[69994]: DEBUG oslo_vmware.api [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926673, 'name': PowerOffVM_Task, 'duration_secs': 0.219662} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.246351] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1357.246512] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1357.246753] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7da26f1-4289-424d-a18a-63fe78498bdf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.310878] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1357.311166] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1357.311453] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleting the datastore file [datastore2] ead967bc-ba1d-4c3c-8dbb-e284b444ffcd {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1357.311816] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9867ba7-f21c-448f-a297-8eef8a1c0c0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.319297] env[69994]: DEBUG oslo_vmware.api [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for the task: (returnval){ [ 1357.319297] env[69994]: value = "task-2926675" [ 1357.319297] env[69994]: _type = "Task" [ 1357.319297] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.328179] env[69994]: DEBUG oslo_vmware.api [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926675, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.434725] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1357.830483] env[69994]: DEBUG oslo_vmware.api [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Task: {'id': task-2926675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137383} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.830843] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1357.831061] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1357.831241] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1357.831421] env[69994]: INFO nova.compute.manager [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1357.831662] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1357.831860] env[69994]: DEBUG nova.compute.manager [-] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1357.831956] env[69994]: DEBUG nova.network.neutron [-] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1357.940808] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1357.941114] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.709s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1357.941333] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.643s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1357.943814] env[69994]: INFO nova.compute.claims [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1358.092737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "8001cb13-6a52-451b-b4b6-57b893975079" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.093051] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "8001cb13-6a52-451b-b4b6-57b893975079" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.093275] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "8001cb13-6a52-451b-b4b6-57b893975079-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.093460] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 
tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "8001cb13-6a52-451b-b4b6-57b893975079-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.093662] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "8001cb13-6a52-451b-b4b6-57b893975079-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.095783] env[69994]: INFO nova.compute.manager [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Terminating instance [ 1358.310355] env[69994]: DEBUG nova.compute.manager [req-b743b765-e4fb-43a3-89b5-862f59d44956 req-bbb3920d-6df2-498f-9de6-43cb5373f20c service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Received event network-vif-deleted-090b588e-3c97-4b85-b16b-0a1f4c7e4b18 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1358.310632] env[69994]: INFO nova.compute.manager [req-b743b765-e4fb-43a3-89b5-862f59d44956 req-bbb3920d-6df2-498f-9de6-43cb5373f20c service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Neutron deleted interface 090b588e-3c97-4b85-b16b-0a1f4c7e4b18; detaching it from the instance and deleting it from the info cache [ 1358.310709] env[69994]: DEBUG nova.network.neutron [req-b743b765-e4fb-43a3-89b5-862f59d44956 req-bbb3920d-6df2-498f-9de6-43cb5373f20c service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.599417] env[69994]: DEBUG nova.compute.manager [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1358.599684] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1358.600405] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e235d0c2-da38-48e9-8bac-9c2ef3566e58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.609313] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1358.609561] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b7df972-2a94-49cf-bfcd-8f8627d0072c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.618192] env[69994]: DEBUG oslo_vmware.api [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1358.618192] env[69994]: value = "task-2926676" [ 1358.618192] env[69994]: _type = "Task" [ 1358.618192] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.627470] env[69994]: DEBUG oslo_vmware.api [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926676, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.792977] env[69994]: DEBUG nova.network.neutron [-] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.813693] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0b4acb1-280d-426c-815b-f76b2cb45e68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.824765] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ac2afe-22a3-4b53-9a69-8fa9e7517d2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.857197] env[69994]: DEBUG nova.compute.manager [req-b743b765-e4fb-43a3-89b5-862f59d44956 req-bbb3920d-6df2-498f-9de6-43cb5373f20c service nova] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Detach interface failed, port_id=090b588e-3c97-4b85-b16b-0a1f4c7e4b18, reason: Instance ead967bc-ba1d-4c3c-8dbb-e284b444ffcd could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1358.899511] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "328868f0-2fe9-4c04-a669-54b073c53b14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.899789] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "328868f0-2fe9-4c04-a669-54b073c53b14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.900012] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "328868f0-2fe9-4c04-a669-54b073c53b14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.900211] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "328868f0-2fe9-4c04-a669-54b073c53b14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.900383] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "328868f0-2fe9-4c04-a669-54b073c53b14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.902517] env[69994]: INFO nova.compute.manager [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Terminating instance [ 1359.093402] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8639e46-2314-4442-a9f5-e9f07bdb6821 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.102300] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bba817-fc5e-4480-8b6b-090310266879 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.135371] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b041030-676b-4035-80e5-eb7d7852d1ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.144367] env[69994]: DEBUG oslo_vmware.api [None 
req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926676, 'name': PowerOffVM_Task, 'duration_secs': 0.182005} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.146451] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1359.147284] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1359.147284] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61e11fbb-ab38-4ad1-a395-f5144b256b58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.149449] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff46e291-3038-4ede-97e5-f1cd895012a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.163770] env[69994]: DEBUG nova.compute.provider_tree [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1359.225525] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1359.225747] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1359.225848] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleting the datastore file [datastore2] 8001cb13-6a52-451b-b4b6-57b893975079 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1359.226175] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc630797-e13c-4895-b4b2-b6f23b3e5085 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.235264] env[69994]: DEBUG oslo_vmware.api [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 
tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for the task: (returnval){ [ 1359.235264] env[69994]: value = "task-2926678" [ 1359.235264] env[69994]: _type = "Task" [ 1359.235264] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.245678] env[69994]: DEBUG oslo_vmware.api [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926678, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.295557] env[69994]: INFO nova.compute.manager [-] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Took 1.46 seconds to deallocate network for instance. [ 1359.405961] env[69994]: DEBUG nova.compute.manager [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1359.406209] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1359.407172] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8896469-4624-400a-bc72-9f4a871bfad4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.415685] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1359.415934] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ced8be5a-bf22-4df8-86c7-4e318b688de0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.422109] env[69994]: DEBUG oslo_vmware.api [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1359.422109] env[69994]: value = "task-2926679" [ 1359.422109] env[69994]: _type = "Task" [ 1359.422109] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.430382] env[69994]: DEBUG oslo_vmware.api [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926679, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.667539] env[69994]: DEBUG nova.scheduler.client.report [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1359.746032] env[69994]: DEBUG oslo_vmware.api [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Task: {'id': task-2926678, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162394} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.746198] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1359.746329] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1359.746512] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1359.746689] env[69994]: INFO nova.compute.manager [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1359.746937] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1359.747470] env[69994]: DEBUG nova.compute.manager [-] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1359.747575] env[69994]: DEBUG nova.network.neutron [-] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1359.802551] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.934036] env[69994]: DEBUG oslo_vmware.api [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926679, 'name': PowerOffVM_Task, 'duration_secs': 0.223501} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.934431] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1359.934646] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1359.935058] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6a9f2e4-ca6a-44e0-8e36-afad19c8efa4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.172508] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.231s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1360.173094] env[69994]: DEBUG nova.compute.manager [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1360.175752] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.373s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1360.175973] env[69994]: DEBUG nova.objects.instance [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lazy-loading 'resources' on Instance uuid ead967bc-ba1d-4c3c-8dbb-e284b444ffcd {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1360.231420] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1360.231684] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1360.231827] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleting the datastore file [datastore1] 328868f0-2fe9-4c04-a669-54b073c53b14 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1360.232148] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1939b825-8dff-4cc5-997c-3e934dfbb9d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.239463] env[69994]: DEBUG oslo_vmware.api [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1360.239463] env[69994]: value = "task-2926681" [ 1360.239463] env[69994]: _type = "Task" [ 1360.239463] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.247736] env[69994]: DEBUG oslo_vmware.api [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926681, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.336655] env[69994]: DEBUG nova.compute.manager [req-4828d5e9-ac47-4051-8e36-e65ec2faec7a req-66c9d470-80fa-471d-ae78-5fe9c021dc13 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Received event network-vif-deleted-68208872-218b-45a2-b062-bedcf2b0803e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1360.336845] env[69994]: INFO nova.compute.manager [req-4828d5e9-ac47-4051-8e36-e65ec2faec7a req-66c9d470-80fa-471d-ae78-5fe9c021dc13 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Neutron deleted interface 68208872-218b-45a2-b062-bedcf2b0803e; detaching it from the instance and deleting it from the info cache [ 1360.336987] env[69994]: DEBUG nova.network.neutron [req-4828d5e9-ac47-4051-8e36-e65ec2faec7a req-66c9d470-80fa-471d-ae78-5fe9c021dc13 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.494300] env[69994]: DEBUG nova.network.neutron [-] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.678815] env[69994]: DEBUG nova.compute.utils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1360.683022] env[69994]: DEBUG nova.compute.manager [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1360.683221] env[69994]: DEBUG nova.network.neutron [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1360.744787] env[69994]: DEBUG nova.policy [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e6a83397a40d4752826e9572ef3e2626', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c3dcb9ec62247adb210b83c9de8bf96', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1360.756237] env[69994]: DEBUG oslo_vmware.api [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926681, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134177} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.757289] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1360.757289] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1360.757289] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1360.757289] env[69994]: INFO nova.compute.manager [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Took 1.35 seconds to destroy the instance on the hypervisor. [ 1360.757289] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1360.759537] env[69994]: DEBUG nova.compute.manager [-] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1360.759642] env[69994]: DEBUG nova.network.neutron [-] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1360.797539] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7527a66b-bb38-46fc-b4b0-fef2dbbd0d8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.806566] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84a9c1f-5953-4cfb-b794-c9174e560402 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.845571] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cabdbca8-d640-48b6-a433-0bd038875208 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.848260] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44152b2b-632b-4416-8944-549794621570 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.858378] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38bbb90-4bc2-4eab-876c-8621bb168511 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.866777] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2a59f5-35a0-4066-8015-a42be12c3818 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.887211] env[69994]: DEBUG nova.compute.provider_tree [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1360.900359] env[69994]: DEBUG nova.compute.manager [req-4828d5e9-ac47-4051-8e36-e65ec2faec7a req-66c9d470-80fa-471d-ae78-5fe9c021dc13 service nova] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Detach interface failed, port_id=68208872-218b-45a2-b062-bedcf2b0803e, reason: Instance 8001cb13-6a52-451b-b4b6-57b893975079 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1360.996983] env[69994]: INFO nova.compute.manager [-] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Took 1.25 seconds to deallocate network for instance. 
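Both teardown sequences above funnel network cleanup through the same retry wrapper: nova.compute.manager builds a local _deallocate_network_with_retries function and hands it to oslo.service's looping-call machinery, which is what emits the "Waiting for function ... to return" lines right before each "Deallocating network for instance" entry. Below is a minimal plain-Python sketch of that retry-with-incremental-sleep pattern; the retry count, sleep times, exception class and the deallocate_network() helper are illustrative assumptions, not Nova's actual values or names.

    import time

    def retry(max_retry_count, inc_sleep_time, max_sleep_time, exceptions):
        """Retry the wrapped function on the given exceptions, sleeping a
        little longer after each failed attempt (incremental back-off)."""
        def decorator(func):
            def wrapper(*args, **kwargs):
                sleep = inc_sleep_time
                for attempt in range(max_retry_count + 1):
                    try:
                        return func(*args, **kwargs)
                    except exceptions:
                        if attempt == max_retry_count:
                            raise
                        time.sleep(sleep)
                        sleep = min(sleep + inc_sleep_time, max_sleep_time)
            return wrapper
        return decorator

    class NeutronConnectFailure(Exception):
        """Stand-in for the connection error the real wrapper retries on."""

    @retry(max_retry_count=3, inc_sleep_time=2, max_sleep_time=12,
           exceptions=(NeutronConnectFailure,))
    def deallocate_network(instance_uuid):
        # In Nova this would unbind and delete the instance's Neutron ports;
        # printing is enough to show the control flow of the wrapper.
        print("deallocating network for", instance_uuid)

    deallocate_network("ead967bc-ba1d-4c3c-8dbb-e284b444ffcd")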
[ 1361.045099] env[69994]: DEBUG nova.network.neutron [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Successfully created port: 54fdc013-6818-4f6e-8b1e-a5b46c4879fb {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1361.186181] env[69994]: DEBUG nova.compute.manager [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1361.237860] env[69994]: DEBUG nova.compute.manager [req-3cbf73ef-8dba-42a0-b956-3adcaf8025cd req-9c5f1e49-e57d-4ec9-a357-c883dbb2c3fb service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Received event network-vif-deleted-e3759990-8ec1-401b-9393-767859b0a13f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1361.238081] env[69994]: INFO nova.compute.manager [req-3cbf73ef-8dba-42a0-b956-3adcaf8025cd req-9c5f1e49-e57d-4ec9-a357-c883dbb2c3fb service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Neutron deleted interface e3759990-8ec1-401b-9393-767859b0a13f; detaching it from the instance and deleting it from the info cache [ 1361.238300] env[69994]: DEBUG nova.network.neutron [req-3cbf73ef-8dba-42a0-b956-3adcaf8025cd req-9c5f1e49-e57d-4ec9-a357-c883dbb2c3fb service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.401747] env[69994]: DEBUG nova.scheduler.client.report [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1361.503985] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1361.715841] env[69994]: DEBUG nova.network.neutron [-] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.742372] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d00c59f-37c9-4ad4-88d0-a2b9efdda936 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.752321] env[69994]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87eb60b0-836d-4dbb-9c3d-8d8a18e7bab9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.782320] env[69994]: DEBUG nova.compute.manager [req-3cbf73ef-8dba-42a0-b956-3adcaf8025cd req-9c5f1e49-e57d-4ec9-a357-c883dbb2c3fb service nova] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Detach interface failed, port_id=e3759990-8ec1-401b-9393-767859b0a13f, reason: Instance 328868f0-2fe9-4c04-a669-54b073c53b14 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1361.906750] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.731s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1361.909050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.405s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1361.909296] env[69994]: DEBUG nova.objects.instance [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lazy-loading 'resources' on Instance uuid 8001cb13-6a52-451b-b4b6-57b893975079 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1361.931664] env[69994]: INFO nova.scheduler.client.report [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Deleted allocations for instance ead967bc-ba1d-4c3c-8dbb-e284b444ffcd [ 1362.195985] env[69994]: DEBUG nova.compute.manager [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1362.217216] env[69994]: INFO nova.compute.manager [-] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Took 1.46 seconds to deallocate network for instance. 
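The report client keeps logging an unchanged inventory payload for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be. In the Placement model those fields read as follows: usable capacity per resource class is (total - reserved) * allocation_ratio, and any single allocation has to fall between min_unit and max_unit in multiples of step_size. A short self-contained sketch applying this to the payload shown above (the formula is standard Placement behaviour; the loop below is only for illustration):

    # capacity = (total - reserved) * allocation_ratio; max_unit caps what a
    # single instance may request (e.g. at most 16 VCPU per allocation here).
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                    'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:g}, "
              f"largest single allocation={inv['max_unit']}")
    # VCPU: 192 (48 vCPUs at 4.0 overcommit), MEMORY_MB: 196078, DISK_GB: 400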
[ 1362.223947] env[69994]: DEBUG nova.virt.hardware [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1362.224213] env[69994]: DEBUG nova.virt.hardware [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1362.224372] env[69994]: DEBUG nova.virt.hardware [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1362.224551] env[69994]: DEBUG nova.virt.hardware [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1362.224698] env[69994]: DEBUG nova.virt.hardware [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1362.224847] env[69994]: DEBUG nova.virt.hardware [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1362.225058] env[69994]: DEBUG nova.virt.hardware [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1362.225222] env[69994]: DEBUG nova.virt.hardware [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1362.225388] env[69994]: DEBUG nova.virt.hardware 
[None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1362.225549] env[69994]: DEBUG nova.virt.hardware [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1362.225719] env[69994]: DEBUG nova.virt.hardware [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1362.226582] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff26c783-3a32-4b27-a8e8-b58406c6a250 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.235502] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295a443f-ebfc-453d-a419-c16313814084 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.440263] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2193345f-ed73-421c-887d-16342cac2aca tempest-ServerActionsTestOtherB-1022362737 tempest-ServerActionsTestOtherB-1022362737-project-member] Lock "ead967bc-ba1d-4c3c-8dbb-e284b444ffcd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.229s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.467544] env[69994]: DEBUG nova.compute.manager [req-4889c1ec-eed7-4a7e-a8a0-e65beebdd727 req-8af7b1e0-2400-47a6-a3e9-7d69cfd3475e service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Received event network-vif-plugged-54fdc013-6818-4f6e-8b1e-a5b46c4879fb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1362.467956] env[69994]: DEBUG oslo_concurrency.lockutils [req-4889c1ec-eed7-4a7e-a8a0-e65beebdd727 req-8af7b1e0-2400-47a6-a3e9-7d69cfd3475e service nova] Acquiring lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1362.468301] env[69994]: DEBUG oslo_concurrency.lockutils [req-4889c1ec-eed7-4a7e-a8a0-e65beebdd727 req-8af7b1e0-2400-47a6-a3e9-7d69cfd3475e service nova] Lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1362.468540] env[69994]: DEBUG oslo_concurrency.lockutils [req-4889c1ec-eed7-4a7e-a8a0-e65beebdd727 req-8af7b1e0-2400-47a6-a3e9-7d69cfd3475e service nova] Lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.468790] env[69994]: DEBUG nova.compute.manager [req-4889c1ec-eed7-4a7e-a8a0-e65beebdd727 req-8af7b1e0-2400-47a6-a3e9-7d69cfd3475e service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] No waiting events found dispatching network-vif-plugged-54fdc013-6818-4f6e-8b1e-a5b46c4879fb {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1362.469014] env[69994]: WARNING nova.compute.manager [req-4889c1ec-eed7-4a7e-a8a0-e65beebdd727 req-8af7b1e0-2400-47a6-a3e9-7d69cfd3475e service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Received unexpected event network-vif-plugged-54fdc013-6818-4f6e-8b1e-a5b46c4879fb for instance with vm_state building and task_state spawning. [ 1362.516579] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771ea689-e4cd-48a7-b9c3-8f399e1a638d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.524703] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99ea3e4-1c40-4dd2-9811-0f9e8a9f8ba1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.556157] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4568d9d-430e-4b50-9137-4f99e83db94e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.560778] env[69994]: DEBUG nova.network.neutron [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Successfully updated port: 54fdc013-6818-4f6e-8b1e-a5b46c4879fb {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1362.565773] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed871a27-f1d7-42fc-9eff-bfb57dd84947 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.580737] env[69994]: DEBUG nova.compute.provider_tree [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1362.730805] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1363.065435] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.065559] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1363.065670] env[69994]: DEBUG nova.network.neutron [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1363.083402] env[69994]: DEBUG nova.scheduler.client.report [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1363.588246] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.679s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1363.591622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.860s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1363.591622] env[69994]: DEBUG nova.objects.instance [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lazy-loading 'resources' on Instance uuid 328868f0-2fe9-4c04-a669-54b073c53b14 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1363.597517] env[69994]: DEBUG nova.network.neutron [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1363.605358] env[69994]: INFO nova.scheduler.client.report [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Deleted allocations for instance 8001cb13-6a52-451b-b4b6-57b893975079 [ 1363.734480] env[69994]: DEBUG nova.network.neutron [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updating instance_info_cache with network_info: [{"id": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "address": "fa:16:3e:1e:2a:e4", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54fdc013-68", "ovs_interfaceid": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.116743] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e7d3797-d290-46f4-9045-7bbdf77f6193 tempest-ServerActionsTestOtherA-1668209586 tempest-ServerActionsTestOtherA-1668209586-project-member] Lock "8001cb13-6a52-451b-b4b6-57b893975079" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.023s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1364.185199] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852d229d-7cec-4503-b073-92bc4be15f25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.194080] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f204c8-59ce-4e34-971d-0d5e0dcb3b5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.225944] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7459ff0-f4c3-40a3-88d3-c1a40eed57d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.234191] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d550aa-279b-4cf5-a98a-fd9b26eb401e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.238380] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1364.238675] env[69994]: DEBUG nova.compute.manager [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Instance network_info: |[{"id": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "address": "fa:16:3e:1e:2a:e4", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54fdc013-68", "ovs_interfaceid": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1364.239371] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:2a:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f762954-6ca5-4da5-bf0a-5d31c51ec570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54fdc013-6818-4f6e-8b1e-a5b46c4879fb', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1364.246803] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1364.247409] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1364.247643] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69826235-787d-4f64-96ad-7fb04e6d92a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.269277] env[69994]: DEBUG nova.compute.provider_tree [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1364.276189] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1364.276189] env[69994]: value = "task-2926683" [ 1364.276189] env[69994]: _type = "Task" [ 1364.276189] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.285095] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926683, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.496936] env[69994]: DEBUG nova.compute.manager [req-92a1df86-b3ee-4da8-8d1b-cf14f8e1fcc7 req-c388494a-1a08-4c11-bb69-2120c54fb2f4 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Received event network-changed-54fdc013-6818-4f6e-8b1e-a5b46c4879fb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1364.496936] env[69994]: DEBUG nova.compute.manager [req-92a1df86-b3ee-4da8-8d1b-cf14f8e1fcc7 req-c388494a-1a08-4c11-bb69-2120c54fb2f4 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Refreshing instance network info cache due to event network-changed-54fdc013-6818-4f6e-8b1e-a5b46c4879fb. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1364.496936] env[69994]: DEBUG oslo_concurrency.lockutils [req-92a1df86-b3ee-4da8-8d1b-cf14f8e1fcc7 req-c388494a-1a08-4c11-bb69-2120c54fb2f4 service nova] Acquiring lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.496936] env[69994]: DEBUG oslo_concurrency.lockutils [req-92a1df86-b3ee-4da8-8d1b-cf14f8e1fcc7 req-c388494a-1a08-4c11-bb69-2120c54fb2f4 service nova] Acquired lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1364.496936] env[69994]: DEBUG nova.network.neutron [req-92a1df86-b3ee-4da8-8d1b-cf14f8e1fcc7 req-c388494a-1a08-4c11-bb69-2120c54fb2f4 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Refreshing network info cache for port 54fdc013-6818-4f6e-8b1e-a5b46c4879fb {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1364.772259] env[69994]: DEBUG nova.scheduler.client.report [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1364.786895] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926683, 'name': CreateVM_Task, 'duration_secs': 0.311695} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.787083] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1364.788049] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.788224] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1364.788569] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1364.788847] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e7899d0-0b9f-46c6-8917-2070a26f3624 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.797329] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1364.797329] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f4c8b3-c5a8-4a49-d254-b71ebdf5b8ac" [ 1364.797329] env[69994]: _type = "Task" [ 1364.797329] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.807840] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f4c8b3-c5a8-4a49-d254-b71ebdf5b8ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.277280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.687s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.305390] env[69994]: INFO nova.scheduler.client.report [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleted allocations for instance 328868f0-2fe9-4c04-a669-54b073c53b14 [ 1365.318256] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52f4c8b3-c5a8-4a49-d254-b71ebdf5b8ac, 'name': SearchDatastore_Task, 'duration_secs': 0.014019} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.318256] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1365.318256] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1365.318256] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.318256] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1365.318509] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1365.318649] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fdd954f-bbe5-41e7-9ee0-f7c783afd1bf {{(pid=69994) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.338904] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1365.339164] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1365.339930] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94670188-725e-4dfe-b9cd-dfae11769088 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.349334] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1365.349334] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52786d0a-a97f-3b48-a804-1fb74bc23d3b" [ 1365.349334] env[69994]: _type = "Task" [ 1365.349334] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.357894] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52786d0a-a97f-3b48-a804-1fb74bc23d3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.378380] env[69994]: DEBUG nova.network.neutron [req-92a1df86-b3ee-4da8-8d1b-cf14f8e1fcc7 req-c388494a-1a08-4c11-bb69-2120c54fb2f4 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updated VIF entry in instance network info cache for port 54fdc013-6818-4f6e-8b1e-a5b46c4879fb. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1365.378380] env[69994]: DEBUG nova.network.neutron [req-92a1df86-b3ee-4da8-8d1b-cf14f8e1fcc7 req-c388494a-1a08-4c11-bb69-2120c54fb2f4 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updating instance_info_cache with network_info: [{"id": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "address": "fa:16:3e:1e:2a:e4", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54fdc013-68", "ovs_interfaceid": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.822825] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2efd9568-4d45-4f21-99b0-fa6a05891bb2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "328868f0-2fe9-4c04-a669-54b073c53b14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.922s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.858931] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52786d0a-a97f-3b48-a804-1fb74bc23d3b, 'name': SearchDatastore_Task, 'duration_secs': 0.010574} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.859755] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dea4ea92-7e85-4a98-9087-6893cfddd2a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.866088] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1365.866088] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b08644-dbf5-0a63-c72a-0f3e65da72e6" [ 1365.866088] env[69994]: _type = "Task" [ 1365.866088] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.874768] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b08644-dbf5-0a63-c72a-0f3e65da72e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.880398] env[69994]: DEBUG oslo_concurrency.lockutils [req-92a1df86-b3ee-4da8-8d1b-cf14f8e1fcc7 req-c388494a-1a08-4c11-bb69-2120c54fb2f4 service nova] Releasing lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1366.378691] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52b08644-dbf5-0a63-c72a-0f3e65da72e6, 'name': SearchDatastore_Task, 'duration_secs': 0.011202} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.378988] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1366.379334] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1c47c71a-65c7-4753-85e6-e0a6a93f08a1/1c47c71a-65c7-4753-85e6-e0a6a93f08a1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1366.379683] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31683a41-8d92-4311-9220-bdffdeec8002 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.388838] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1366.388838] env[69994]: value = "task-2926684" [ 1366.388838] env[69994]: _type = "Task" [ 1366.388838] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.398723] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926684, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.904941] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926684, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482267} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.905295] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1c47c71a-65c7-4753-85e6-e0a6a93f08a1/1c47c71a-65c7-4753-85e6-e0a6a93f08a1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1366.907319] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1366.907319] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e7386920-cfde-4ed8-9781-e53c99dd7cb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.913245] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1366.913245] env[69994]: value = "task-2926685" [ 1366.913245] env[69994]: _type = "Task" [ 1366.913245] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.924226] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926685, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.425158] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926685, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068931} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.425453] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1367.426316] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861d99cb-6f17-4435-ba2d-37a635312d9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.449886] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 1c47c71a-65c7-4753-85e6-e0a6a93f08a1/1c47c71a-65c7-4753-85e6-e0a6a93f08a1.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1367.450235] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b2699bb-5951-4a1f-930a-c0f96eea63fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.470958] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1367.470958] env[69994]: value = "task-2926686" [ 1367.470958] env[69994]: _type = "Task" [ 1367.470958] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.479107] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926686, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.982185] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926686, 'name': ReconfigVM_Task, 'duration_secs': 0.288749} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.982484] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 1c47c71a-65c7-4753-85e6-e0a6a93f08a1/1c47c71a-65c7-4753-85e6-e0a6a93f08a1.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1367.983142] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5232679c-6f8b-4765-b0b5-d4a69b985d63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.991340] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1367.991340] env[69994]: value = "task-2926687" [ 1367.991340] env[69994]: _type = "Task" [ 1367.991340] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.999770] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926687, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.203125] env[69994]: DEBUG nova.compute.manager [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1368.501559] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926687, 'name': Rename_Task, 'duration_secs': 0.145873} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.501846] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1368.502105] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b07e8293-8942-4fe6-9f4d-5a9cd88726eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.508830] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1368.508830] env[69994]: value = "task-2926688" [ 1368.508830] env[69994]: _type = "Task" [ 1368.508830] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.516427] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926688, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.724424] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1368.724813] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1369.021052] env[69994]: DEBUG oslo_vmware.api [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926688, 'name': PowerOnVM_Task, 'duration_secs': 0.456449} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.021516] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1369.021829] env[69994]: INFO nova.compute.manager [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Took 6.83 seconds to spawn the instance on the hypervisor. 
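[editor's note] The CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task records above all follow the same shape: the driver submits a vCenter task, then oslo_vmware's wait_for_task/_poll_task loop polls it, logging "progress is N%" until it reports "completed successfully" with a duration_secs. The sketch below only reproduces that observable poll-until-done behaviour; it is not the oslo.vmware implementation, and `get_task_info` / `TaskInfo` are hypothetical stand-ins for the real vSphere API lookups.

```python
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    """Hypothetical stand-in for a vSphere TaskInfo object."""
    state: str            # 'queued' | 'running' | 'success' | 'error'
    progress: int         # 0-100, as in the "progress is N%" log lines
    error: str | None = None


def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it finishes, mirroring the
    'progress is 0%' ... 'completed successfully' sequence in the log.
    Returns the elapsed time, analogous to the logged duration_secs."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)        # hypothetical fetch of TaskInfo
        if info.state == 'success':
            return time.monotonic() - start
        if info.state == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task_id} did not finish in {timeout}s")
        print(f"Task {task_id} progress is {info.progress}%")
        time.sleep(poll_interval)
```

The real loop lives in oslo_vmware/api.py (the wait_for_task and _poll_task paths shown in every record above); this sketch exists only to make the recurring pattern in the log easier to follow.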
[ 1369.022106] env[69994]: DEBUG nova.compute.manager [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1369.022926] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad77cab-00b1-4c1e-ba8f-9f5e5490428e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.230461] env[69994]: INFO nova.compute.claims [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1369.539617] env[69994]: INFO nova.compute.manager [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Took 14.26 seconds to build instance. [ 1369.736198] env[69994]: INFO nova.compute.resource_tracker [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating resource usage from migration a989f1bf-dee4-4f57-a259-abc9716bfabd [ 1369.814080] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e771477-9140-4723-9f99-1a4466ba9bc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.822847] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ff55f1-0555-4900-80cb-c86467d76cc5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.853131] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7654aa-bc1c-455d-b7fc-57590371538c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.860942] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ddc4035-fe42-44a1-b2fc-3229eff7bfd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.874482] env[69994]: DEBUG nova.compute.provider_tree [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1370.041313] env[69994]: DEBUG oslo_concurrency.lockutils [None req-412cc3b4-a68d-46c7-a665-d2fe2cf0aea3 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.770s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1370.377715] 
env[69994]: DEBUG nova.scheduler.client.report [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1370.659515] env[69994]: DEBUG nova.compute.manager [req-87733b60-f8dd-45f3-94c7-1abc41e862f2 req-78c1a965-619f-49ba-b1c9-f8c2fd93f566 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Received event network-changed-54fdc013-6818-4f6e-8b1e-a5b46c4879fb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1370.659649] env[69994]: DEBUG nova.compute.manager [req-87733b60-f8dd-45f3-94c7-1abc41e862f2 req-78c1a965-619f-49ba-b1c9-f8c2fd93f566 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Refreshing instance network info cache due to event network-changed-54fdc013-6818-4f6e-8b1e-a5b46c4879fb. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1370.659745] env[69994]: DEBUG oslo_concurrency.lockutils [req-87733b60-f8dd-45f3-94c7-1abc41e862f2 req-78c1a965-619f-49ba-b1c9-f8c2fd93f566 service nova] Acquiring lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.659884] env[69994]: DEBUG oslo_concurrency.lockutils [req-87733b60-f8dd-45f3-94c7-1abc41e862f2 req-78c1a965-619f-49ba-b1c9-f8c2fd93f566 service nova] Acquired lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1370.660062] env[69994]: DEBUG nova.network.neutron [req-87733b60-f8dd-45f3-94c7-1abc41e862f2 req-78c1a965-619f-49ba-b1c9-f8c2fd93f566 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Refreshing network info cache for port 54fdc013-6818-4f6e-8b1e-a5b46c4879fb {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1370.882519] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.158s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1370.882783] env[69994]: INFO nova.compute.manager [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Migrating [ 1371.374197] env[69994]: DEBUG nova.network.neutron [req-87733b60-f8dd-45f3-94c7-1abc41e862f2 req-78c1a965-619f-49ba-b1c9-f8c2fd93f566 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updated VIF entry in instance network info cache for port 54fdc013-6818-4f6e-8b1e-a5b46c4879fb. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1371.374575] env[69994]: DEBUG nova.network.neutron [req-87733b60-f8dd-45f3-94c7-1abc41e862f2 req-78c1a965-619f-49ba-b1c9-f8c2fd93f566 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updating instance_info_cache with network_info: [{"id": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "address": "fa:16:3e:1e:2a:e4", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54fdc013-68", "ovs_interfaceid": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1371.396777] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.396939] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1371.397133] env[69994]: DEBUG nova.network.neutron [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1371.877992] env[69994]: DEBUG oslo_concurrency.lockutils [req-87733b60-f8dd-45f3-94c7-1abc41e862f2 req-78c1a965-619f-49ba-b1c9-f8c2fd93f566 service nova] Releasing lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1372.096075] env[69994]: DEBUG nova.network.neutron [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance_info_cache with network_info: [{"id": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "address": "fa:16:3e:8a:06:07", "network": {"id": 
"504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c872b8c-ae", "ovs_interfaceid": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.598630] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1374.113111] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0375fe-8999-4256-83ea-4bbd084a09d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.133518] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance '29ea539a-d8f4-487b-b5e7-1f15534272f9' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1374.639099] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1374.639390] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a42b482b-7be0-400d-94b2-3e6d3c90eff5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.648587] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1374.648587] env[69994]: value = "task-2926689" [ 1374.648587] env[69994]: _type = "Task" [ 1374.648587] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.656962] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926689, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.159462] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926689, 'name': PowerOffVM_Task, 'duration_secs': 0.216498} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.159864] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1375.159912] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance '29ea539a-d8f4-487b-b5e7-1f15534272f9' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1375.666354] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1375.666587] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1375.666676] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1375.666881] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1375.667060] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 
tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1375.667215] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1375.667420] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1375.667578] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1375.667743] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1375.667903] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1375.668095] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1375.673284] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e757d6d-c12b-4279-8251-aaabc0acf833 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.690555] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1375.690555] env[69994]: value = "task-2926690" [ 1375.690555] env[69994]: _type = "Task" [ 1375.690555] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.698998] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926690, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.201360] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926690, 'name': ReconfigVM_Task, 'duration_secs': 0.26267} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.201772] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance '29ea539a-d8f4-487b-b5e7-1f15534272f9' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1376.708316] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1376.708570] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1376.708713] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1376.708899] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1376.709060] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1376.709208] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1376.709413] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 
tempest-ServerActionsTestJSON-219624784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1376.709573] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1376.709738] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1376.709900] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1376.710090] env[69994]: DEBUG nova.virt.hardware [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1376.715702] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Reconfiguring VM instance instance-00000067 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1376.715992] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a100ac8d-4bea-4622-913b-28dc6bf782e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.737367] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1376.737367] env[69994]: value = "task-2926691" [ 1376.737367] env[69994]: _type = "Task" [ 1376.737367] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.746673] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926691, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.248882] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926691, 'name': ReconfigVM_Task, 'duration_secs': 0.165038} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.249250] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Reconfigured VM instance instance-00000067 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1377.249884] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec54c3dc-900d-41a9-b935-023c9b7c3960 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.271579] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 29ea539a-d8f4-487b-b5e7-1f15534272f9/29ea539a-d8f4-487b-b5e7-1f15534272f9.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1377.272149] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3378bf38-c9a1-4e79-a5f6-fe560290858d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.290303] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1377.290303] env[69994]: value = "task-2926692" [ 1377.290303] env[69994]: _type = "Task" [ 1377.290303] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.303662] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926692, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.800777] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926692, 'name': ReconfigVM_Task, 'duration_secs': 0.352599} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.801097] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 29ea539a-d8f4-487b-b5e7-1f15534272f9/29ea539a-d8f4-487b-b5e7-1f15534272f9.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1377.801323] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance '29ea539a-d8f4-487b-b5e7-1f15534272f9' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1378.308204] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78458d92-8c95-4467-934e-3802436291f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.329173] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a342e107-7adf-45f1-9a4c-cc0ae69b0e32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.351435] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance '29ea539a-d8f4-487b-b5e7-1f15534272f9' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1378.891490] env[69994]: DEBUG nova.network.neutron [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Port 8c872b8c-ae3b-4523-a7c5-2a3ed82baac3 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1379.913026] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "29ea539a-d8f4-487b-b5e7-1f15534272f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1379.913308] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1379.913428] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock 
"29ea539a-d8f4-487b-b5e7-1f15534272f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1380.455015] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "94169894-f772-41c9-95a1-ddf622f2c9f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1380.455260] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1380.455473] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "94169894-f772-41c9-95a1-ddf622f2c9f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1380.455657] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1380.455823] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1380.457680] env[69994]: INFO nova.compute.manager [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Terminating instance [ 1380.947965] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.948259] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1380.948345] env[69994]: DEBUG nova.network.neutron [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1380.961846] env[69994]: DEBUG nova.compute.manager [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1380.962066] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1380.962978] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8f0bfe-a223-4fcb-8115-c390b4bd76a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.972357] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1380.972591] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e46a42bb-c2ae-427c-bf0c-e92bd7e7b876 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.979839] env[69994]: DEBUG oslo_vmware.api [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1380.979839] env[69994]: value = "task-2926693" [ 1380.979839] env[69994]: _type = "Task" [ 1380.979839] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.989258] env[69994]: DEBUG oslo_vmware.api [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926693, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.492026] env[69994]: DEBUG oslo_vmware.api [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926693, 'name': PowerOffVM_Task, 'duration_secs': 0.201782} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.492026] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1381.492208] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1381.492419] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3fdddb8f-dc05-42ae-bd21-12acdcd8891f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.557400] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1381.557644] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1381.557805] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleting the datastore file [datastore2] 94169894-f772-41c9-95a1-ddf622f2c9f6 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1381.558068] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ed4d4f9-7dfb-4738-b5e7-f0670fcb80b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.564268] env[69994]: DEBUG oslo_vmware.api [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for the task: (returnval){ [ 1381.564268] env[69994]: value = "task-2926695" [ 1381.564268] env[69994]: _type = "Task" [ 1381.564268] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.573216] env[69994]: DEBUG oslo_vmware.api [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926695, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.661729] env[69994]: DEBUG nova.network.neutron [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance_info_cache with network_info: [{"id": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "address": "fa:16:3e:8a:06:07", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c872b8c-ae", "ovs_interfaceid": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.074456] env[69994]: DEBUG oslo_vmware.api [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Task: {'id': task-2926695, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159491} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.074849] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1382.074945] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1382.075069] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1382.075256] env[69994]: INFO nova.compute.manager [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1382.075489] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1382.075684] env[69994]: DEBUG nova.compute.manager [-] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1382.075780] env[69994]: DEBUG nova.network.neutron [-] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1382.164924] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1382.511403] env[69994]: DEBUG nova.compute.manager [req-84face33-0f8b-41fe-9586-a945f0f50bd2 req-818caf5c-8617-43e2-a8a2-234f4018e964 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Received event network-vif-deleted-6645c1c7-f316-403a-98aa-8b2cca92f8e4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1382.511608] env[69994]: INFO nova.compute.manager [req-84face33-0f8b-41fe-9586-a945f0f50bd2 req-818caf5c-8617-43e2-a8a2-234f4018e964 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Neutron deleted interface 6645c1c7-f316-403a-98aa-8b2cca92f8e4; detaching it from the instance and deleting it from the info cache [ 1382.511788] env[69994]: DEBUG nova.network.neutron [req-84face33-0f8b-41fe-9586-a945f0f50bd2 req-818caf5c-8617-43e2-a8a2-234f4018e964 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.691187] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67d35d1-9c95-415f-bcec-e1eb95088fec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.710476] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92f305e-88d1-4a3e-9953-62a77a08bbb1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.717486] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance '29ea539a-d8f4-487b-b5e7-1f15534272f9' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1382.991036] env[69994]: DEBUG nova.network.neutron [-] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.014229] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6e30922-c397-4900-a021-d5df83a4174b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.025701] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a99a326b-53bd-4aca-b03c-6b90f835c93f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.053713] env[69994]: DEBUG nova.compute.manager [req-84face33-0f8b-41fe-9586-a945f0f50bd2 req-818caf5c-8617-43e2-a8a2-234f4018e964 service nova] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Detach interface failed, port_id=6645c1c7-f316-403a-98aa-8b2cca92f8e4, reason: Instance 94169894-f772-41c9-95a1-ddf622f2c9f6 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1383.223998] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1383.224451] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96e851b0-57f4-4021-95a0-7db964889b70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.232621] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1383.232621] env[69994]: value = "task-2926696" [ 1383.232621] env[69994]: _type = "Task" [ 1383.232621] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.240694] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926696, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.493571] env[69994]: INFO nova.compute.manager [-] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Took 1.42 seconds to deallocate network for instance. [ 1383.742229] env[69994]: DEBUG oslo_vmware.api [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926696, 'name': PowerOnVM_Task, 'duration_secs': 0.369732} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.742457] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1383.742639] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e29434ad-2d3d-4fd3-ad22-035eb9c92a47 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance '29ea539a-d8f4-487b-b5e7-1f15534272f9' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1384.000068] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1384.000352] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1384.000580] env[69994]: DEBUG nova.objects.instance [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lazy-loading 'resources' on Instance uuid 94169894-f772-41c9-95a1-ddf622f2c9f6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1384.589085] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11383f8a-20ad-450b-8f79-73fded3e259d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.596413] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed9acb9-819e-4823-b50b-798af25bc35c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.627070] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a9c0cd-5e0c-42f2-87b0-1c5959d43a9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.634169] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49729618-2a03-4af3-b989-356df5490da9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.646987] env[69994]: DEBUG nova.compute.provider_tree [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1385.151062] env[69994]: DEBUG nova.scheduler.client.report [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1385.655412] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.655s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1385.674112] env[69994]: INFO nova.scheduler.client.report [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Deleted allocations for instance 94169894-f772-41c9-95a1-ddf622f2c9f6 [ 1386.181335] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f3802b91-24dc-4d9c-9b12-1a1f2aad3448 tempest-AttachVolumeShelveTestJSON-1660149816 tempest-AttachVolumeShelveTestJSON-1660149816-project-member] Lock "94169894-f772-41c9-95a1-ddf622f2c9f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.726s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1386.348258] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1386.348514] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1386.348706] env[69994]: DEBUG nova.compute.manager [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Going to confirm migration 9 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1386.888132] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.888382] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1386.888455] env[69994]: DEBUG nova.network.neutron [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1386.888637] env[69994]: DEBUG nova.objects.instance [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lazy-loading 'info_cache' on Instance uuid 29ea539a-d8f4-487b-b5e7-1f15534272f9 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1388.119411] env[69994]: DEBUG nova.network.neutron [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance_info_cache with network_info: [{"id": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "address": "fa:16:3e:8a:06:07", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c872b8c-ae", "ovs_interfaceid": "8c872b8c-ae3b-4523-a7c5-2a3ed82baac3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.622646] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-29ea539a-d8f4-487b-b5e7-1f15534272f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1388.622924] env[69994]: DEBUG nova.objects.instance [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lazy-loading 'migration_context' on Instance uuid 29ea539a-d8f4-487b-b5e7-1f15534272f9 {{(pid=69994) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1389.126838] env[69994]: DEBUG nova.objects.base [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Object Instance<29ea539a-d8f4-487b-b5e7-1f15534272f9> lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1389.129017] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d9b1bc-ec60-4263-b827-a1f981b5e755 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.161378] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7d25b0f-e7e8-42ee-bbf0-f69158eeffc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.168861] env[69994]: DEBUG oslo_vmware.api [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1389.168861] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520e21cb-ff61-e653-4a24-0f60616a7349" [ 1389.168861] env[69994]: _type = "Task" [ 1389.168861] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.180333] env[69994]: DEBUG oslo_vmware.api [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]520e21cb-ff61-e653-4a24-0f60616a7349, 'name': SearchDatastore_Task, 'duration_secs': 0.007381} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.180722] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1389.181115] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1389.788929] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e138ab5c-de18-462e-9130-bb3e16c5f78f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.796682] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d28301a-7d7a-45c3-96b6-99be7915840d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.825291] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2374032-e60e-4972-819f-fc5f9557093a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.831725] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a27f8eb-95cb-46fe-9299-06ac6e83b6fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.845148] env[69994]: DEBUG nova.compute.provider_tree [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1390.349160] env[69994]: DEBUG nova.scheduler.client.report [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1391.360012] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.179s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1391.917849] env[69994]: INFO nova.scheduler.client.report [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleted allocation for migration a989f1bf-dee4-4f57-a259-abc9716bfabd [ 1392.423848] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1f5d417-6019-4cc4-92b2-8bb8071f4155 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.075s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1393.373090] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1393.373090] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1393.373090] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "29ea539a-d8f4-487b-b5e7-1f15534272f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1393.373389] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1393.373445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1393.375572] env[69994]: INFO nova.compute.manager [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Terminating instance [ 1393.879302] env[69994]: DEBUG nova.compute.manager [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 
tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1393.879703] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1393.880461] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63ea1c1-77e0-42dc-af3c-0391b953b2ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.888331] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1393.888577] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd91ff36-36c8-41e9-ba07-c5ff499ee019 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.895448] env[69994]: DEBUG oslo_vmware.api [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1393.895448] env[69994]: value = "task-2926698" [ 1393.895448] env[69994]: _type = "Task" [ 1393.895448] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.904485] env[69994]: DEBUG oslo_vmware.api [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926698, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.405186] env[69994]: DEBUG oslo_vmware.api [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926698, 'name': PowerOffVM_Task, 'duration_secs': 0.189457} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.405480] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1394.405627] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1394.405878] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21d78340-579c-4e9a-bded-02a0a6b9245f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.469492] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1394.469756] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1394.469909] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleting the datastore file [datastore2] 29ea539a-d8f4-487b-b5e7-1f15534272f9 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1394.470199] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74042c3b-7319-4d83-8481-18a1d9bfc4af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.478358] env[69994]: DEBUG oslo_vmware.api [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1394.478358] env[69994]: value = "task-2926700" [ 1394.478358] env[69994]: _type = "Task" [ 1394.478358] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.485658] env[69994]: DEBUG oslo_vmware.api [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926700, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.988331] env[69994]: DEBUG oslo_vmware.api [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926700, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142451} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.988703] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1394.988817] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1394.988954] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1394.989146] env[69994]: INFO nova.compute.manager [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1394.989410] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1394.989609] env[69994]: DEBUG nova.compute.manager [-] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1394.989705] env[69994]: DEBUG nova.network.neutron [-] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1395.465956] env[69994]: DEBUG nova.compute.manager [req-27e5a638-0321-4472-b613-8ba491fd05ea req-8c52150c-b57e-4475-86a9-7a3fc916c8bc service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Received event network-vif-deleted-8c872b8c-ae3b-4523-a7c5-2a3ed82baac3 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1395.465956] env[69994]: INFO nova.compute.manager [req-27e5a638-0321-4472-b613-8ba491fd05ea req-8c52150c-b57e-4475-86a9-7a3fc916c8bc service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Neutron deleted interface 8c872b8c-ae3b-4523-a7c5-2a3ed82baac3; detaching it from the instance and deleting it from the info cache [ 1395.465956] env[69994]: DEBUG nova.network.neutron [req-27e5a638-0321-4472-b613-8ba491fd05ea req-8c52150c-b57e-4475-86a9-7a3fc916c8bc service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.936090] env[69994]: DEBUG nova.network.neutron [-] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.968225] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-309bc35d-f5cb-4c37-93a5-2dc8a1c91a51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.977757] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c451b930-f002-4d98-9714-b76289cffa0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.003870] env[69994]: DEBUG nova.compute.manager [req-27e5a638-0321-4472-b613-8ba491fd05ea req-8c52150c-b57e-4475-86a9-7a3fc916c8bc service nova] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Detach interface failed, port_id=8c872b8c-ae3b-4523-a7c5-2a3ed82baac3, reason: Instance 29ea539a-d8f4-487b-b5e7-1f15534272f9 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1396.438697] env[69994]: INFO nova.compute.manager [-] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Took 1.45 seconds to deallocate network for instance. 
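[annotation] The destroy sequence above (PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task) follows the wait_for_task / _poll_task pattern visible in these records: vCenter returns a task handle immediately and oslo_vmware.api polls it, logging "progress is N%" between polls until the task completes. The snippet below is only a minimal sketch of that polling loop, not the oslo.vmware implementation; the fetch_task_state callable, the "running"/"success"/"error" state names, and the poll_interval/timeout values are illustrative assumptions added for the example.

    # Sketch of a poll-until-done loop in the spirit of the records above.
    # Not Nova or oslo.vmware code; fetch_task_state and the state names are
    # hypothetical stand-ins for the real vCenter task query.
    import time

    def wait_for_task(task_id, fetch_task_state, poll_interval=0.5, timeout=60.0):
        """Poll fetch_task_state(task_id) until it reports success or error."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = fetch_task_state(task_id)
            if state == "success":
                return
            if state == "error":
                raise RuntimeError(f"task {task_id} failed")
            # Task still running: report progress and retry after a short sleep,
            # mirroring the "progress is 0%" lines emitted between polls.
            print(f"task {task_id} progress is {progress}%")
            time.sleep(poll_interval)
        raise TimeoutError(f"task {task_id} did not complete within {timeout}s")

    if __name__ == "__main__":
        # Toy state source that completes on the third poll.
        states = iter([("running", 0), ("running", 50), ("success", 100)])
        wait_for_task("task-2926698", lambda _task: next(states), poll_interval=0.1)

[annotation] The same pattern repeats later in this log for CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task; only the invoked vCenter method and the task duration differ.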
[ 1396.944862] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.945171] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.945405] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1396.966367] env[69994]: INFO nova.scheduler.client.report [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleted allocations for instance 29ea539a-d8f4-487b-b5e7-1f15534272f9 [ 1397.473507] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b938e4d-a76d-4db4-854c-3c3065486a19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "29ea539a-d8f4-487b-b5e7-1f15534272f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.100s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1398.262809] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "2f581516-f617-4650-bbbc-97feafa183f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1398.263062] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "2f581516-f617-4650-bbbc-97feafa183f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1398.765209] env[69994]: DEBUG nova.compute.manager [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1399.290337] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1399.290337] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1399.291880] env[69994]: INFO nova.compute.claims [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1400.360279] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f0105f-45b0-4c97-a5d6-3fc4dce432b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.367842] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ed1f72-bef5-4ff7-af5b-8c7dc077ff03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.397367] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee9a380-b713-4372-908a-1f0f63f54f9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.404016] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2c7a9f-84da-46d4-9aa1-ff1a95c3b1dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.416791] env[69994]: DEBUG nova.compute.provider_tree [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1400.920332] env[69994]: DEBUG nova.scheduler.client.report [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1401.426719] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.136s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1401.427288] env[69994]: DEBUG nova.compute.manager [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1401.932383] env[69994]: DEBUG nova.compute.utils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1401.933836] env[69994]: DEBUG nova.compute.manager [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1401.934013] env[69994]: DEBUG nova.network.neutron [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1401.982683] env[69994]: DEBUG nova.policy [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56900b2a71cc423a868f3c1b81f70172', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2417f6585042417c95491eb3d7cba343', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1402.254266] env[69994]: DEBUG nova.network.neutron [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Successfully created port: 4a7cf72f-19f6-46a2-87cc-d8f019aec72d {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1402.437904] env[69994]: DEBUG nova.compute.manager [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1403.449101] env[69994]: DEBUG nova.compute.manager [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1403.475891] env[69994]: DEBUG nova.virt.hardware [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1403.476170] env[69994]: DEBUG nova.virt.hardware [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1403.476330] env[69994]: DEBUG nova.virt.hardware [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1403.476511] env[69994]: DEBUG nova.virt.hardware [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1403.476912] env[69994]: DEBUG nova.virt.hardware [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1403.477129] env[69994]: DEBUG nova.virt.hardware [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1403.477352] env[69994]: DEBUG nova.virt.hardware [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1403.477512] env[69994]: 
DEBUG nova.virt.hardware [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1403.477687] env[69994]: DEBUG nova.virt.hardware [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1403.477847] env[69994]: DEBUG nova.virt.hardware [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1403.478026] env[69994]: DEBUG nova.virt.hardware [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1403.478908] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3c5ace-0692-4638-88a5-28509f489b11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.487186] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059cc4c6-12fa-4cd5-b0c3-f562fb788835 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.602338] env[69994]: DEBUG nova.compute.manager [req-91f88d5c-667d-4fdd-872d-5b160325141b req-54ab3b26-2c0b-46a6-ada7-b81cff5a6ee2 service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Received event network-vif-plugged-4a7cf72f-19f6-46a2-87cc-d8f019aec72d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1403.602606] env[69994]: DEBUG oslo_concurrency.lockutils [req-91f88d5c-667d-4fdd-872d-5b160325141b req-54ab3b26-2c0b-46a6-ada7-b81cff5a6ee2 service nova] Acquiring lock "2f581516-f617-4650-bbbc-97feafa183f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1403.602823] env[69994]: DEBUG oslo_concurrency.lockutils [req-91f88d5c-667d-4fdd-872d-5b160325141b req-54ab3b26-2c0b-46a6-ada7-b81cff5a6ee2 service nova] Lock "2f581516-f617-4650-bbbc-97feafa183f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1403.603020] env[69994]: DEBUG oslo_concurrency.lockutils [req-91f88d5c-667d-4fdd-872d-5b160325141b req-54ab3b26-2c0b-46a6-ada7-b81cff5a6ee2 service nova] Lock "2f581516-f617-4650-bbbc-97feafa183f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1403.603166] env[69994]: DEBUG nova.compute.manager [req-91f88d5c-667d-4fdd-872d-5b160325141b 
req-54ab3b26-2c0b-46a6-ada7-b81cff5a6ee2 service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] No waiting events found dispatching network-vif-plugged-4a7cf72f-19f6-46a2-87cc-d8f019aec72d {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1403.603364] env[69994]: WARNING nova.compute.manager [req-91f88d5c-667d-4fdd-872d-5b160325141b req-54ab3b26-2c0b-46a6-ada7-b81cff5a6ee2 service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Received unexpected event network-vif-plugged-4a7cf72f-19f6-46a2-87cc-d8f019aec72d for instance with vm_state building and task_state spawning. [ 1403.688620] env[69994]: DEBUG nova.network.neutron [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Successfully updated port: 4a7cf72f-19f6-46a2-87cc-d8f019aec72d {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1404.191446] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.191706] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1404.191910] env[69994]: DEBUG nova.network.neutron [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1404.723060] env[69994]: DEBUG nova.network.neutron [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1404.838338] env[69994]: DEBUG nova.network.neutron [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance_info_cache with network_info: [{"id": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "address": "fa:16:3e:1d:69:25", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a7cf72f-19", "ovs_interfaceid": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.341057] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1405.341527] env[69994]: DEBUG nova.compute.manager [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Instance network_info: |[{"id": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "address": "fa:16:3e:1d:69:25", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a7cf72f-19", "ovs_interfaceid": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1405.341963] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:69:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c5652322-9f10-4996-baed-4c0aa13a1b4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a7cf72f-19f6-46a2-87cc-d8f019aec72d', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1405.349690] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1405.349911] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1405.350158] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6faa346c-6951-4266-ae25-2e6d289f7b16 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.370253] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1405.370253] env[69994]: value = "task-2926701" [ 1405.370253] env[69994]: _type = "Task" [ 1405.370253] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.379721] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926701, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.627288] env[69994]: DEBUG nova.compute.manager [req-653799d9-732e-4ac5-86cc-81ff2aa73d03 req-4a6b386a-c19a-4601-a86c-5c2f73f188dd service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Received event network-changed-4a7cf72f-19f6-46a2-87cc-d8f019aec72d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1405.627496] env[69994]: DEBUG nova.compute.manager [req-653799d9-732e-4ac5-86cc-81ff2aa73d03 req-4a6b386a-c19a-4601-a86c-5c2f73f188dd service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Refreshing instance network info cache due to event network-changed-4a7cf72f-19f6-46a2-87cc-d8f019aec72d. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1405.627738] env[69994]: DEBUG oslo_concurrency.lockutils [req-653799d9-732e-4ac5-86cc-81ff2aa73d03 req-4a6b386a-c19a-4601-a86c-5c2f73f188dd service nova] Acquiring lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.627897] env[69994]: DEBUG oslo_concurrency.lockutils [req-653799d9-732e-4ac5-86cc-81ff2aa73d03 req-4a6b386a-c19a-4601-a86c-5c2f73f188dd service nova] Acquired lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1405.628079] env[69994]: DEBUG nova.network.neutron [req-653799d9-732e-4ac5-86cc-81ff2aa73d03 req-4a6b386a-c19a-4601-a86c-5c2f73f188dd service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Refreshing network info cache for port 4a7cf72f-19f6-46a2-87cc-d8f019aec72d {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1405.880478] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926701, 'name': CreateVM_Task, 'duration_secs': 0.303755} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.880838] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1405.881284] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.881450] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1405.881789] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1405.882051] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-965869a2-8117-4023-9f33-dceac9684a65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.887304] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1405.887304] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5235293e-2cbf-2df7-cd69-d4da689d89f2" [ 1405.887304] env[69994]: _type = "Task" [ 1405.887304] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.894683] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5235293e-2cbf-2df7-cd69-d4da689d89f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.297116] env[69994]: DEBUG nova.network.neutron [req-653799d9-732e-4ac5-86cc-81ff2aa73d03 req-4a6b386a-c19a-4601-a86c-5c2f73f188dd service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updated VIF entry in instance network info cache for port 4a7cf72f-19f6-46a2-87cc-d8f019aec72d. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1406.297476] env[69994]: DEBUG nova.network.neutron [req-653799d9-732e-4ac5-86cc-81ff2aa73d03 req-4a6b386a-c19a-4601-a86c-5c2f73f188dd service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance_info_cache with network_info: [{"id": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "address": "fa:16:3e:1d:69:25", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a7cf72f-19", "ovs_interfaceid": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.397431] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5235293e-2cbf-2df7-cd69-d4da689d89f2, 'name': SearchDatastore_Task, 'duration_secs': 0.009603} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.397732] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1406.397960] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1406.398204] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.398358] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1406.398543] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1406.398798] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58280cdf-c091-4333-9809-17e2a094cb4c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.408315] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1406.408452] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1406.409201] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69a50a91-0e1f-4449-92a2-2d346a58118d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.414900] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1406.414900] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5280cdd4-2ff9-1608-7ecf-a71dd628517c" [ 1406.414900] env[69994]: _type = "Task" [ 1406.414900] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.422321] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5280cdd4-2ff9-1608-7ecf-a71dd628517c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.800050] env[69994]: DEBUG oslo_concurrency.lockutils [req-653799d9-732e-4ac5-86cc-81ff2aa73d03 req-4a6b386a-c19a-4601-a86c-5c2f73f188dd service nova] Releasing lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1406.925610] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]5280cdd4-2ff9-1608-7ecf-a71dd628517c, 'name': SearchDatastore_Task, 'duration_secs': 0.008936} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.926408] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbb105e6-6cdf-4e92-ade7-0f7119e8ced9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.931990] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1406.931990] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e99c74-6619-1d56-5092-0f9633dfdba6" [ 1406.931990] env[69994]: _type = "Task" [ 1406.931990] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.939233] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e99c74-6619-1d56-5092-0f9633dfdba6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.443195] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e99c74-6619-1d56-5092-0f9633dfdba6, 'name': SearchDatastore_Task, 'duration_secs': 0.009376} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.443510] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1407.443775] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 2f581516-f617-4650-bbbc-97feafa183f6/2f581516-f617-4650-bbbc-97feafa183f6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1407.444049] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2265365e-a2be-4612-b5d8-b91ebb4f456e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.453047] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1407.453047] env[69994]: value = "task-2926702" [ 1407.453047] env[69994]: _type = "Task" [ 1407.453047] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.459454] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926702, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.508252] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1407.508541] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1407.962465] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926702, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.414804} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.962863] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore2] 2f581516-f617-4650-bbbc-97feafa183f6/2f581516-f617-4650-bbbc-97feafa183f6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1407.962971] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1407.963147] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d45bea4-0028-429c-94ce-b671c0e2f493 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.969751] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1407.969751] env[69994]: value = "task-2926703" [ 1407.969751] env[69994]: _type = "Task" [ 1407.969751] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.976811] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926703, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.011997] env[69994]: DEBUG nova.compute.utils [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1408.480200] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926703, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066586} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.480429] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1408.481198] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9177048-f50e-4e83-b969-f4160694ab97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.503601] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] 2f581516-f617-4650-bbbc-97feafa183f6/2f581516-f617-4650-bbbc-97feafa183f6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1408.503844] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38fe5a34-e444-4436-ae3a-6f3d3a489a1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.518098] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1408.524268] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1408.524268] env[69994]: value = "task-2926704" [ 1408.524268] env[69994]: _type = "Task" [ 1408.524268] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.534315] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926704, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.035163] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926704, 'name': ReconfigVM_Task, 'duration_secs': 0.2614} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.035163] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Reconfigured VM instance instance-0000007e to attach disk [datastore2] 2f581516-f617-4650-bbbc-97feafa183f6/2f581516-f617-4650-bbbc-97feafa183f6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1409.035533] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4cd1943c-421d-49f8-8059-e5f09172d710 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.041531] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1409.041531] env[69994]: value = "task-2926705" [ 1409.041531] env[69994]: _type = "Task" [ 1409.041531] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.049379] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926705, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.552569] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926705, 'name': Rename_Task, 'duration_secs': 0.134541} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.552946] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1409.553285] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44d376af-dda4-44df-a467-9cf5fde76f39 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.560152] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1409.560152] env[69994]: value = "task-2926706" [ 1409.560152] env[69994]: _type = "Task" [ 1409.560152] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.571599] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926706, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.576247] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1409.576476] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1409.576694] env[69994]: INFO nova.compute.manager [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Attaching volume a96aa87e-7877-4803-a072-e09481b1c269 to /dev/sdb [ 1409.607478] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97fa83ff-a010-4ac9-88b5-59b72aeb3789 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.614280] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db857b2-35c9-4da8-aad4-d53fe044b0bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.627596] env[69994]: DEBUG nova.virt.block_device [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updating existing volume attachment record: 29ecbae6-e3fe-47e5-808a-c527209a5ecb {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1410.070602] env[69994]: DEBUG oslo_vmware.api [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926706, 'name': PowerOnVM_Task, 'duration_secs': 0.433687} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.070873] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1410.071075] env[69994]: INFO nova.compute.manager [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Took 6.62 seconds to spawn the instance on the hypervisor. [ 1410.071265] env[69994]: DEBUG nova.compute.manager [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1410.072130] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f68ab6-275f-4b89-9d6e-603557daef9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.590544] env[69994]: INFO nova.compute.manager [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Took 11.32 seconds to build instance. [ 1411.093104] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bbc62777-7091-4706-ac40-5dead5b1a611 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "2f581516-f617-4650-bbbc-97feafa183f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.830s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1411.336417] env[69994]: DEBUG nova.compute.manager [req-615ba006-aba2-46a8-9aab-f755e371871f req-7811abf4-ab31-49ea-b25e-d06263348d90 service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Received event network-changed-4a7cf72f-19f6-46a2-87cc-d8f019aec72d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1411.336624] env[69994]: DEBUG nova.compute.manager [req-615ba006-aba2-46a8-9aab-f755e371871f req-7811abf4-ab31-49ea-b25e-d06263348d90 service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Refreshing instance network info cache due to event network-changed-4a7cf72f-19f6-46a2-87cc-d8f019aec72d. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1411.336851] env[69994]: DEBUG oslo_concurrency.lockutils [req-615ba006-aba2-46a8-9aab-f755e371871f req-7811abf4-ab31-49ea-b25e-d06263348d90 service nova] Acquiring lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.336996] env[69994]: DEBUG oslo_concurrency.lockutils [req-615ba006-aba2-46a8-9aab-f755e371871f req-7811abf4-ab31-49ea-b25e-d06263348d90 service nova] Acquired lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1411.337231] env[69994]: DEBUG nova.network.neutron [req-615ba006-aba2-46a8-9aab-f755e371871f req-7811abf4-ab31-49ea-b25e-d06263348d90 service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Refreshing network info cache for port 4a7cf72f-19f6-46a2-87cc-d8f019aec72d {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1412.047939] env[69994]: DEBUG nova.network.neutron [req-615ba006-aba2-46a8-9aab-f755e371871f req-7811abf4-ab31-49ea-b25e-d06263348d90 service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updated VIF entry in instance network info cache for port 4a7cf72f-19f6-46a2-87cc-d8f019aec72d. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1412.048296] env[69994]: DEBUG nova.network.neutron [req-615ba006-aba2-46a8-9aab-f755e371871f req-7811abf4-ab31-49ea-b25e-d06263348d90 service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance_info_cache with network_info: [{"id": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "address": "fa:16:3e:1d:69:25", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a7cf72f-19", "ovs_interfaceid": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.550681] env[69994]: DEBUG oslo_concurrency.lockutils [req-615ba006-aba2-46a8-9aab-f755e371871f req-7811abf4-ab31-49ea-b25e-d06263348d90 service nova] Releasing lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1413.810202] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic 
task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.810597] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.316308] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.316553] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.316631] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.316818] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.316908] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.317043] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.317167] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1414.317315] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.672162] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1414.672418] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587688', 'volume_id': 'a96aa87e-7877-4803-a072-e09481b1c269', 'name': 'volume-a96aa87e-7877-4803-a072-e09481b1c269', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1c47c71a-65c7-4753-85e6-e0a6a93f08a1', 'attached_at': '', 'detached_at': '', 'volume_id': 'a96aa87e-7877-4803-a072-e09481b1c269', 'serial': 'a96aa87e-7877-4803-a072-e09481b1c269'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1414.673353] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec9b320-f508-4fba-95e3-c6cb8de9d74d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.689661] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1041de-2a2a-493b-8f19-cdc48a09ef26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.713291] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] volume-a96aa87e-7877-4803-a072-e09481b1c269/volume-a96aa87e-7877-4803-a072-e09481b1c269.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1414.713546] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-446f04ac-2758-41e6-90d5-4cdc87904c79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.730980] env[69994]: DEBUG oslo_vmware.api [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1414.730980] env[69994]: value = "task-2926711" [ 1414.730980] env[69994]: _type = "Task" [ 1414.730980] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.739375] env[69994]: DEBUG oslo_vmware.api [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926711, 'name': ReconfigVM_Task} progress is 5%. 
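The connection_info dumped by _attach_volume_vmdk above is what the Cinder VMDK backend hands to the compute driver: driver_volume_type 'vmdk', the managed object ('vm-587688') that owns the volume's backing disk, the volume id and name, and access flags. A short sketch of pulling out the fields the attach needs; the dict is abridged from the log and the helper is illustrative, not Nova code:

    # Abridged from the log entry above.
    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-587688',   # object that owns the backing disk
            'volume_id': 'a96aa87e-7877-4803-a072-e09481b1c269',
            'name': 'volume-a96aa87e-7877-4803-a072-e09481b1c269',
            'access_mode': 'rw',
            'encrypted': False,
        },
    }

    def vmdk_attach_params(info):
        # Illustrative helper: reject non-vmdk volumes, return the pieces
        # needed to locate and attach the backing disk.
        if info['driver_volume_type'] != 'vmdk':
            raise ValueError('only vmdk-backed volumes handled here')
        data = info['data']
        return data['volume'], data['volume_id'], data['access_mode']

    print(vmdk_attach_params(connection_info))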
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.819756] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1414.820187] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1414.820292] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1414.820434] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1414.821374] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3d226d-66f7-409e-877a-d7859d05fb0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.830129] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2fda49-74b1-4509-8541-f50f013a9de8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.844473] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e46c7e-601c-4f5c-9534-9e0cf046bdc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.851248] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc049ec1-9144-4cfb-aef8-f3499b4cd03d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.880225] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180369MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1414.880353] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1414.880560] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1415.241329] env[69994]: DEBUG oslo_vmware.api [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926711, 'name': ReconfigVM_Task, 'duration_secs': 0.328907} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.241634] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Reconfigured VM instance instance-0000007d to attach disk [datastore2] volume-a96aa87e-7877-4803-a072-e09481b1c269/volume-a96aa87e-7877-4803-a072-e09481b1c269.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1415.246490] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3db4349-7019-4bc7-991f-0f5057627f21 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.260925] env[69994]: DEBUG oslo_vmware.api [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1415.260925] env[69994]: value = "task-2926712" [ 1415.260925] env[69994]: _type = "Task" [ 1415.260925] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.268634] env[69994]: DEBUG oslo_vmware.api [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926712, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.770451] env[69994]: DEBUG oslo_vmware.api [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926712, 'name': ReconfigVM_Task, 'duration_secs': 0.160736} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.770774] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587688', 'volume_id': 'a96aa87e-7877-4803-a072-e09481b1c269', 'name': 'volume-a96aa87e-7877-4803-a072-e09481b1c269', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1c47c71a-65c7-4753-85e6-e0a6a93f08a1', 'attached_at': '', 'detached_at': '', 'volume_id': 'a96aa87e-7877-4803-a072-e09481b1c269', 'serial': 'a96aa87e-7877-4803-a072-e09481b1c269'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1415.906754] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance aefb7903-afd1-4574-bec1-adab769728b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1415.907039] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 27d2bf57-80ec-4bc4-b87b-560f7dfd6524 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1415.907238] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 1c47c71a-65c7-4753-85e6-e0a6a93f08a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1415.907418] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 2f581516-f617-4650-bbbc-97feafa183f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
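The audit above (per-instance allocation checks followed by the hypervisor resource view) is run from ComputeManager.update_available_resource, one of the periodic tasks listed earlier. A minimal sketch of how oslo.service wires such tasks, assuming a stripped-down manager class rather than Nova's real one:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        # spacing is the interval in seconds between runs of this task.
        @periodic_task.periodic_task(spacing=60)
        def update_available_resource(self, context):
            pass  # audit resources, then report inventory to placement

    mgr = Manager()
    # A timer (e.g. a looping call) would normally drive this; each call runs
    # whichever decorated tasks are due and logs "Running periodic task ...".
    mgr.run_periodic_tasks(context=None)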
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1415.907618] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1415.907761] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1415.967436] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d8fc3a-e330-4821-b428-84a6176ca359 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.974854] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d8470a-db8d-4ec2-becf-5af097ffad0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.004449] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0e129e-13b4-4b1b-8db2-30553d08f934 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.011155] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f8dc17-c762-4133-a621-690bebbc1c73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.024159] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1416.527247] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1416.807877] env[69994]: DEBUG nova.objects.instance [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lazy-loading 'flavor' on Instance uuid 1c47c71a-65c7-4753-85e6-e0a6a93f08a1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1416.963733] env[69994]: INFO nova.compute.manager [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Rescuing [ 1416.964134] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.964409] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1416.964589] env[69994]: DEBUG nova.network.neutron [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1417.031682] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1417.031867] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.151s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1417.313395] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b155ad3d-c463-4623-b94a-5f5ea2a11e6f tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.737s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1417.695394] env[69994]: DEBUG nova.network.neutron [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updating instance_info_cache with network_info: [{"id": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "address": "fa:16:3e:1e:2a:e4", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap54fdc013-68", "ovs_interfaceid": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.198458] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1419.737727] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1419.737727] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6ff656f-6d85-4efb-b7bf-db7cdc766f07 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.745429] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1419.745429] env[69994]: value = "task-2926713" [ 1419.745429] env[69994]: _type = "Task" [ 1419.745429] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.753349] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926713, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.255059] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926713, 'name': PowerOffVM_Task, 'duration_secs': 0.200947} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.255325] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1420.256110] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270139f9-4b2e-4074-8b01-11d86ced1e2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.276635] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c716d62e-a118-4e9e-948b-912dad21e6c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.301622] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1420.301867] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c375b87-75d1-4b39-9ef7-6741cc369dd5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.307920] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1420.307920] env[69994]: value = "task-2926714" [ 1420.307920] env[69994]: _type = "Task" [ 1420.307920] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.319373] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1420.319562] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1420.319801] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.319948] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1420.320139] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1420.320353] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee1b8790-b514-4ab0-8668-a0a6706e1c22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.328161] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1420.328333] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Folder [datastore1] devstack-image-cache_base created. 
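The "[datastore1] devstack-image-cache_base/..." strings are datastore paths: the datastore name in brackets followed by a folder path, the same form MakeDirectory and the datastore browser expect. A tiny, library-free sketch of composing and splitting them (illustrative only):

    def join_ds_path(datastore, *parts):
        # -> "[datastore1] devstack-image-cache_base/foo.vmdk"
        return '[%s] %s' % (datastore, '/'.join(parts))

    def split_ds_path(ds_path):
        ds, _, rel = ds_path.partition('] ')
        return ds.lstrip('['), rel

    print(join_ds_path('datastore1', 'devstack-image-cache_base',
                       'f75f967d-5bd8-4c15-9a52-96f7e9dd9d48',
                       'f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk'))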
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1420.328969] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e05fe21-2471-47c1-9c44-c90c53a3b4b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.333302] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1420.333302] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522010c7-3c3e-77ef-d3b8-71a659a00afe" [ 1420.333302] env[69994]: _type = "Task" [ 1420.333302] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.341419] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522010c7-3c3e-77ef-d3b8-71a659a00afe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.843317] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522010c7-3c3e-77ef-d3b8-71a659a00afe, 'name': SearchDatastore_Task, 'duration_secs': 0.008015} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.844145] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c149cbb3-63e0-40c8-85b8-aee503180684 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.848850] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1420.848850] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522e8b46-6a8b-1e7c-d555-51b86c7c054a" [ 1420.848850] env[69994]: _type = "Task" [ 1420.848850] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.856414] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522e8b46-6a8b-1e7c-d555-51b86c7c054a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.359278] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]522e8b46-6a8b-1e7c-d555-51b86c7c054a, 'name': SearchDatastore_Task, 'duration_secs': 0.008772} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.359545] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1421.359803] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1c47c71a-65c7-4753-85e6-e0a6a93f08a1/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk. {{(pid=69994) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1421.360086] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3bc793cf-90c0-463f-83f5-4862a6c5f6b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.365999] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1421.365999] env[69994]: value = "task-2926715" [ 1421.365999] env[69994]: _type = "Task" [ 1421.365999] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.373023] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926715, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.876019] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926715, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.414071} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.876412] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 1c47c71a-65c7-4753-85e6-e0a6a93f08a1/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk. 
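The rescue disk is produced by copying the cached image VMDK into the instance folder as <image-id>-rescue.vmdk via vCenter's VirtualDiskManager, which is what ds_util.disk_copy drives above. A sketch of that call through the same session API; the datacenter reference and paths are placeholders:

    def copy_rescue_disk(session, dc_ref, src_path, dst_path):
        # CopyVirtualDisk_Task runs on the VirtualDiskManager and returns a
        # Task moref, polled like any other task.
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                                  sourceName=src_path, sourceDatacenter=dc_ref,
                                  destName=dst_path, destDatacenter=dc_ref)
        session.wait_for_task(task)

    # e.g., with paths built as in the earlier sketch:
    # copy_rescue_disk(session, dc_ref,
    #     '[datastore1] devstack-image-cache_base/<image>/<image>.vmdk',
    #     '[datastore1] <instance-uuid>/<image>-rescue.vmdk')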
[ 1421.877050] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092aab01-222d-4e7d-80d7-dee00542e233 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.903726] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 1c47c71a-65c7-4753-85e6-e0a6a93f08a1/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1421.903958] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7ba0997-1e1a-4c22-9300-5be2a08eed78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.920809] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1421.920809] env[69994]: value = "task-2926716" [ 1421.920809] env[69994]: _type = "Task" [ 1421.920809] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.927869] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926716, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.430719] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926716, 'name': ReconfigVM_Task, 'duration_secs': 0.327718} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.431020] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 1c47c71a-65c7-4753-85e6-e0a6a93f08a1/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1422.432127] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212a3e39-59bf-48ba-b1c6-598eee56b864 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.458947] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa51ceca-741e-4b68-8281-015a86991953 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.474479] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1422.474479] env[69994]: value = "task-2926717" [ 1422.474479] env[69994]: _type = "Task" [ 1422.474479] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.483990] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926717, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.984045] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926717, 'name': ReconfigVM_Task, 'duration_secs': 0.155043} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.984445] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1422.984547] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-995e2c99-2f9c-4cff-9543-c5dae36fb1d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.991478] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1422.991478] env[69994]: value = "task-2926718" [ 1422.991478] env[69994]: _type = "Task" [ 1422.991478] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.006888] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926718, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.501282] env[69994]: DEBUG oslo_vmware.api [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926718, 'name': PowerOnVM_Task, 'duration_secs': 0.35702} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.501548] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1423.504292] env[69994]: DEBUG nova.compute.manager [None req-2ce780e8-cc8e-401b-9fd5-eeb80fb30e61 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1423.505334] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0952124-e35e-4d31-97d8-20ed9cdcf07f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.976071] env[69994]: INFO nova.compute.manager [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Unrescuing [ 1424.976450] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.976519] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquired lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1424.976687] env[69994]: DEBUG nova.network.neutron [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1425.728972] env[69994]: DEBUG nova.network.neutron [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 
1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updating instance_info_cache with network_info: [{"id": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "address": "fa:16:3e:1e:2a:e4", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54fdc013-68", "ovs_interfaceid": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.232445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Releasing lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1426.233088] env[69994]: DEBUG nova.objects.instance [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lazy-loading 'flavor' on Instance uuid 1c47c71a-65c7-4753-85e6-e0a6a93f08a1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1426.738752] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4474e9f8-3ba1-4914-99ae-e35d088dc8c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.764233] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1426.764554] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-268a57e1-b574-447d-b0e2-3ec35023ac75 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.770758] env[69994]: DEBUG oslo_vmware.api [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1426.770758] env[69994]: value = "task-2926719" [ 1426.770758] env[69994]: _type = "Task" [ 1426.770758] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.778273] env[69994]: DEBUG oslo_vmware.api [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926719, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.280524] env[69994]: DEBUG oslo_vmware.api [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926719, 'name': PowerOffVM_Task, 'duration_secs': 0.224504} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.280898] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1427.286134] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Reconfiguring VM instance instance-0000007d to detach disk 2002 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1427.286402] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59fc0853-8bb6-459f-91fd-7acdd3b6c431 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.304097] env[69994]: DEBUG oslo_vmware.api [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1427.304097] env[69994]: value = "task-2926720" [ 1427.304097] env[69994]: _type = "Task" [ 1427.304097] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.311249] env[69994]: DEBUG oslo_vmware.api [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926720, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.814149] env[69994]: DEBUG oslo_vmware.api [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926720, 'name': ReconfigVM_Task, 'duration_secs': 0.225609} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.814425] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Reconfigured VM instance instance-0000007d to detach disk 2002 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1427.814640] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1427.814868] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de03e8cc-69de-460a-a3b6-622cc3827ccf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.821063] env[69994]: DEBUG oslo_vmware.api [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1427.821063] env[69994]: value = "task-2926721" [ 1427.821063] env[69994]: _type = "Task" [ 1427.821063] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.828032] env[69994]: DEBUG oslo_vmware.api [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926721, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.330334] env[69994]: DEBUG oslo_vmware.api [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926721, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.831459] env[69994]: DEBUG oslo_vmware.api [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926721, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.331821] env[69994]: DEBUG oslo_vmware.api [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926721, 'name': PowerOnVM_Task, 'duration_secs': 1.017968} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.332165] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1429.332334] env[69994]: DEBUG nova.compute.manager [None req-04dc54ec-5901-4a22-a1b5-56c79a6b3cab tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1429.333069] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec2e2b0-6d68-41de-bb9f-a04faa0e4eab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.831424] env[69994]: DEBUG nova.compute.manager [req-1ec4d0b6-7dc4-4285-a1b2-ff8fa5a064c5 req-b59bb286-52c5-4acc-b533-1c4dca1ab1dd service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Received event network-changed-54fdc013-6818-4f6e-8b1e-a5b46c4879fb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1430.831698] env[69994]: DEBUG nova.compute.manager [req-1ec4d0b6-7dc4-4285-a1b2-ff8fa5a064c5 req-b59bb286-52c5-4acc-b533-1c4dca1ab1dd service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Refreshing instance network info cache due to event network-changed-54fdc013-6818-4f6e-8b1e-a5b46c4879fb. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1430.831847] env[69994]: DEBUG oslo_concurrency.lockutils [req-1ec4d0b6-7dc4-4285-a1b2-ff8fa5a064c5 req-b59bb286-52c5-4acc-b533-1c4dca1ab1dd service nova] Acquiring lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.831991] env[69994]: DEBUG oslo_concurrency.lockutils [req-1ec4d0b6-7dc4-4285-a1b2-ff8fa5a064c5 req-b59bb286-52c5-4acc-b533-1c4dca1ab1dd service nova] Acquired lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1430.832170] env[69994]: DEBUG nova.network.neutron [req-1ec4d0b6-7dc4-4285-a1b2-ff8fa5a064c5 req-b59bb286-52c5-4acc-b533-1c4dca1ab1dd service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Refreshing network info cache for port 54fdc013-6818-4f6e-8b1e-a5b46c4879fb {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1431.526630] env[69994]: DEBUG nova.network.neutron [req-1ec4d0b6-7dc4-4285-a1b2-ff8fa5a064c5 req-b59bb286-52c5-4acc-b533-1c4dca1ab1dd service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updated VIF entry in instance network info cache for port 54fdc013-6818-4f6e-8b1e-a5b46c4879fb. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1431.527794] env[69994]: DEBUG nova.network.neutron [req-1ec4d0b6-7dc4-4285-a1b2-ff8fa5a064c5 req-b59bb286-52c5-4acc-b533-1c4dca1ab1dd service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updating instance_info_cache with network_info: [{"id": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "address": "fa:16:3e:1e:2a:e4", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54fdc013-68", "ovs_interfaceid": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.029542] env[69994]: DEBUG oslo_concurrency.lockutils [req-1ec4d0b6-7dc4-4285-a1b2-ff8fa5a064c5 req-b59bb286-52c5-4acc-b533-1c4dca1ab1dd service nova] Releasing lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1432.857240] env[69994]: DEBUG nova.compute.manager [req-b71eb21f-14b0-466c-9098-3d69da319b30 req-5f5aa537-bc9a-477e-b437-5ec7c89f0554 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Received event network-changed-54fdc013-6818-4f6e-8b1e-a5b46c4879fb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1432.857406] env[69994]: DEBUG nova.compute.manager [req-b71eb21f-14b0-466c-9098-3d69da319b30 req-5f5aa537-bc9a-477e-b437-5ec7c89f0554 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Refreshing instance network info cache due to event network-changed-54fdc013-6818-4f6e-8b1e-a5b46c4879fb. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1432.857617] env[69994]: DEBUG oslo_concurrency.lockutils [req-b71eb21f-14b0-466c-9098-3d69da319b30 req-5f5aa537-bc9a-477e-b437-5ec7c89f0554 service nova] Acquiring lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.857782] env[69994]: DEBUG oslo_concurrency.lockutils [req-b71eb21f-14b0-466c-9098-3d69da319b30 req-5f5aa537-bc9a-477e-b437-5ec7c89f0554 service nova] Acquired lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1432.857958] env[69994]: DEBUG nova.network.neutron [req-b71eb21f-14b0-466c-9098-3d69da319b30 req-5f5aa537-bc9a-477e-b437-5ec7c89f0554 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Refreshing network info cache for port 54fdc013-6818-4f6e-8b1e-a5b46c4879fb {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1433.546929] env[69994]: DEBUG nova.network.neutron [req-b71eb21f-14b0-466c-9098-3d69da319b30 req-5f5aa537-bc9a-477e-b437-5ec7c89f0554 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updated VIF entry in instance network info cache for port 54fdc013-6818-4f6e-8b1e-a5b46c4879fb. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1433.547322] env[69994]: DEBUG nova.network.neutron [req-b71eb21f-14b0-466c-9098-3d69da319b30 req-5f5aa537-bc9a-477e-b437-5ec7c89f0554 service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updating instance_info_cache with network_info: [{"id": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "address": "fa:16:3e:1e:2a:e4", "network": {"id": "a0ab2b05-0ee7-4baf-8ae7-17b1d680a728", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-486325023-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c3dcb9ec62247adb210b83c9de8bf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54fdc013-68", "ovs_interfaceid": "54fdc013-6818-4f6e-8b1e-a5b46c4879fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.049689] env[69994]: DEBUG oslo_concurrency.lockutils [req-b71eb21f-14b0-466c-9098-3d69da319b30 req-5f5aa537-bc9a-477e-b437-5ec7c89f0554 service nova] Releasing lock "refresh_cache-1c47c71a-65c7-4753-85e6-e0a6a93f08a1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1449.074285] env[69994]: DEBUG nova.compute.manager [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 
tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1449.595285] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1449.595571] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1450.101600] env[69994]: INFO nova.compute.claims [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1450.608320] env[69994]: INFO nova.compute.resource_tracker [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating resource usage from migration 6e129a88-ce81-4837-993d-d273409d4e38 [ 1450.671661] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc33d436-e898-435b-9d86-5ce4627e5e64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.680012] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1aa515-58c3-401f-b3f9-7cdb15be544e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.710247] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c13f6f-eaff-4dac-9370-b8832f60323e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.717180] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264a7603-61e3-4f84-93e6-6f3daa0a05c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.729820] env[69994]: DEBUG nova.compute.provider_tree [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1451.233021] env[69994]: DEBUG nova.scheduler.client.report [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1451.738571] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.143s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1451.738765] env[69994]: INFO nova.compute.manager [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Migrating [ 1452.253288] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.253659] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1452.253659] env[69994]: DEBUG nova.network.neutron [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1452.955981] env[69994]: DEBUG nova.network.neutron [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance_info_cache with network_info: [{"id": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "address": "fa:16:3e:1d:69:25", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a7cf72f-19", "ovs_interfaceid": 
"4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.459063] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1454.974072] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e619bd7-974d-4a52-b442-a4aecb472909 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.993627] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance '2f581516-f617-4650-bbbc-97feafa183f6' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1455.499320] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1455.499628] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18c03b48-5f0e-4486-86ef-30205be30409 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.507213] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1455.507213] env[69994]: value = "task-2926722" [ 1455.507213] env[69994]: _type = "Task" [ 1455.507213] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.514714] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926722, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.017560] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926722, 'name': PowerOffVM_Task, 'duration_secs': 0.211194} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.017929] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1456.018035] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance '2f581516-f617-4650-bbbc-97feafa183f6' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1456.524034] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1456.524290] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1456.524451] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1456.524641] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1456.524779] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1456.524925] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1456.525192] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 
tempest-ServerActionsTestJSON-219624784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1456.525368] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1456.525537] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1456.525699] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1456.525869] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1456.531949] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34cf0630-64d2-4561-8817-e2c86c5241e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.546959] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1456.546959] env[69994]: value = "task-2926723" [ 1456.546959] env[69994]: _type = "Task" [ 1456.546959] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.554661] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926723, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.056655] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926723, 'name': ReconfigVM_Task, 'duration_secs': 0.151538} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.057055] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance '2f581516-f617-4650-bbbc-97feafa183f6' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1457.562984] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1457.563258] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1457.563309] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1457.563449] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1457.563598] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1457.563748] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1457.563951] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1457.564185] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 
tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1457.564359] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1457.564522] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1457.564696] env[69994]: DEBUG nova.virt.hardware [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1457.569864] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Reconfiguring VM instance instance-0000007e to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1457.570162] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a55ac33a-a30e-4b8d-a792-467274206698 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.588885] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1457.588885] env[69994]: value = "task-2926724" [ 1457.588885] env[69994]: _type = "Task" [ 1457.588885] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.596228] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926724, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.098794] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926724, 'name': ReconfigVM_Task, 'duration_secs': 0.166798} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.099156] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Reconfigured VM instance instance-0000007e to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1458.099802] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e96482-669f-40a6-a593-7dcf6a5af088 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.122917] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] 2f581516-f617-4650-bbbc-97feafa183f6/2f581516-f617-4650-bbbc-97feafa183f6.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1458.123220] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7131234-34ac-4236-9601-69a48a83405c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.141481] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1458.141481] env[69994]: value = "task-2926725" [ 1458.141481] env[69994]: _type = "Task" [ 1458.141481] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.149557] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926725, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.651133] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926725, 'name': ReconfigVM_Task, 'duration_secs': 0.265233} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.651436] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Reconfigured VM instance instance-0000007e to attach disk [datastore2] 2f581516-f617-4650-bbbc-97feafa183f6/2f581516-f617-4650-bbbc-97feafa183f6.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1458.651830] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance '2f581516-f617-4650-bbbc-97feafa183f6' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1459.159775] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a36e1c-ea29-4fa7-a883-986bd7d65440 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.178164] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e91b325-980e-4be2-aa91-f1262672a622 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.195229] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance '2f581516-f617-4650-bbbc-97feafa183f6' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1459.733430] env[69994]: DEBUG nova.network.neutron [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Port 4a7cf72f-19f6-46a2-87cc-d8f019aec72d binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1460.755260] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "2f581516-f617-4650-bbbc-97feafa183f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1460.755673] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "2f581516-f617-4650-bbbc-97feafa183f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1460.755673] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock 
"2f581516-f617-4650-bbbc-97feafa183f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1461.789669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.789939] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1461.790069] env[69994]: DEBUG nova.network.neutron [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1462.477360] env[69994]: DEBUG nova.network.neutron [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance_info_cache with network_info: [{"id": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "address": "fa:16:3e:1d:69:25", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a7cf72f-19", "ovs_interfaceid": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.980702] env[69994]: DEBUG oslo_concurrency.lockutils [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1463.512690] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1cdfc575-7d84-42a4-864a-82206bd8e218 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.532277] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af9fbaf-f223-42b9-9847-a61f4c11291b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.539416] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance '2f581516-f617-4650-bbbc-97feafa183f6' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1464.046052] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1464.046459] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8826d2fd-9316-42bf-9b47-24f7b30adf34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.054304] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1464.054304] env[69994]: value = "task-2926726" [ 1464.054304] env[69994]: _type = "Task" [ 1464.054304] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.062366] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926726, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.564790] env[69994]: DEBUG oslo_vmware.api [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926726, 'name': PowerOnVM_Task, 'duration_secs': 0.36616} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.565084] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1464.565322] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-55eeb243-d8e6-45c7-8244-70e94ef624c8 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance '2f581516-f617-4650-bbbc-97feafa183f6' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1465.326146] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1465.326497] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1465.830024] env[69994]: INFO nova.compute.manager [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Detaching volume a96aa87e-7877-4803-a072-e09481b1c269 [ 1465.859511] env[69994]: INFO nova.virt.block_device [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Attempting to driver detach volume a96aa87e-7877-4803-a072-e09481b1c269 from mountpoint /dev/sdb [ 1465.859757] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1465.859941] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587688', 'volume_id': 'a96aa87e-7877-4803-a072-e09481b1c269', 'name': 'volume-a96aa87e-7877-4803-a072-e09481b1c269', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1c47c71a-65c7-4753-85e6-e0a6a93f08a1', 'attached_at': '', 'detached_at': '', 'volume_id': 'a96aa87e-7877-4803-a072-e09481b1c269', 'serial': 'a96aa87e-7877-4803-a072-e09481b1c269'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1465.860825] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c883cb34-6cd5-44f9-8090-b6580167ab6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.882694] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5241ad30-1e05-489f-9a70-726afbcbfa2a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.889098] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3ac592-3390-4152-a3f1-7b5ec30c66c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.908299] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee256f49-986e-4d38-a364-6528a50764e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.921914] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] The volume has not been displaced from its original location: [datastore2] volume-a96aa87e-7877-4803-a072-e09481b1c269/volume-a96aa87e-7877-4803-a072-e09481b1c269.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1465.927221] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Reconfiguring VM instance instance-0000007d to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1465.928468] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-234ca2f8-97b3-43bd-96ba-84004a4c65ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.949369] env[69994]: DEBUG oslo_vmware.api [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1465.949369] env[69994]: value = "task-2926727" [ 1465.949369] env[69994]: _type = "Task" [ 1465.949369] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.957703] env[69994]: DEBUG oslo_vmware.api [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926727, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.459779] env[69994]: DEBUG oslo_vmware.api [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926727, 'name': ReconfigVM_Task, 'duration_secs': 0.218152} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.459779] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Reconfigured VM instance instance-0000007d to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1466.464916] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fd73b33-a80a-4517-a0de-c67e19dcfff0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.480798] env[69994]: DEBUG oslo_vmware.api [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1466.480798] env[69994]: value = "task-2926728" [ 1466.480798] env[69994]: _type = "Task" [ 1466.480798] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.489742] env[69994]: DEBUG oslo_vmware.api [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926728, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.549359] env[69994]: DEBUG nova.network.neutron [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Port 4a7cf72f-19f6-46a2-87cc-d8f019aec72d binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1466.549682] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1466.549870] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1466.550082] env[69994]: DEBUG nova.network.neutron [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1466.990251] env[69994]: DEBUG oslo_vmware.api [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926728, 'name': ReconfigVM_Task, 'duration_secs': 0.135089} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.990545] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-587688', 'volume_id': 'a96aa87e-7877-4803-a072-e09481b1c269', 'name': 'volume-a96aa87e-7877-4803-a072-e09481b1c269', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1c47c71a-65c7-4753-85e6-e0a6a93f08a1', 'attached_at': '', 'detached_at': '', 'volume_id': 'a96aa87e-7877-4803-a072-e09481b1c269', 'serial': 'a96aa87e-7877-4803-a072-e09481b1c269'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1467.277978] env[69994]: DEBUG nova.network.neutron [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance_info_cache with network_info: [{"id": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "address": "fa:16:3e:1d:69:25", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a7cf72f-19", "ovs_interfaceid": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.531495] env[69994]: DEBUG nova.objects.instance [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lazy-loading 'flavor' on Instance uuid 1c47c71a-65c7-4753-85e6-e0a6a93f08a1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1467.780578] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1468.283993] env[69994]: DEBUG nova.compute.manager [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 
tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69994) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1468.284298] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1468.284555] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1468.538299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5564ccb-fb9e-4ea6-beaf-e95fa22c9e73 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.212s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1468.787293] env[69994]: DEBUG nova.objects.instance [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lazy-loading 'migration_context' on Instance uuid 2f581516-f617-4650-bbbc-97feafa183f6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1469.420029] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f41e79-ac0c-4bd6-9842-e4b42a7e99d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.428961] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197da8a8-9feb-4dde-a90d-04a8fdb52cdb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.457922] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b10843-8476-4e95-8eeb-32dd0a085c0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.464573] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a65cd5-e60f-4e32-b0a4-54b59dab6414 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.477170] env[69994]: DEBUG nova.compute.provider_tree [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1469.531391] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 
None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.531581] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.531736] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.531879] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.532036] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.532170] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1469.532315] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.633590] env[69994]: DEBUG oslo_concurrency.lockutils [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1469.633930] env[69994]: DEBUG oslo_concurrency.lockutils [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1469.634058] env[69994]: DEBUG oslo_concurrency.lockutils [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1469.634289] env[69994]: DEBUG oslo_concurrency.lockutils [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock 
"1c47c71a-65c7-4753-85e6-e0a6a93f08a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1469.634462] env[69994]: DEBUG oslo_concurrency.lockutils [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1469.636401] env[69994]: INFO nova.compute.manager [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Terminating instance [ 1469.980403] env[69994]: DEBUG nova.scheduler.client.report [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1470.034738] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1470.139939] env[69994]: DEBUG nova.compute.manager [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1470.140195] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1470.141119] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11b6baa-4abf-43a0-9330-5bf9bb5a0886 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.148488] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1470.148744] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a409157-de8a-4b1d-9450-421dba16f6e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.154715] env[69994]: DEBUG oslo_vmware.api [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1470.154715] env[69994]: value = "task-2926729" [ 1470.154715] env[69994]: _type = "Task" [ 1470.154715] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.161873] env[69994]: DEBUG oslo_vmware.api [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926729, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.663758] env[69994]: DEBUG oslo_vmware.api [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926729, 'name': PowerOffVM_Task, 'duration_secs': 0.16981} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.664196] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1470.664254] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1470.664469] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f43aae0-6a74-497a-ae86-d5d2fc5cab32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.725149] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1470.725446] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1470.725650] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Deleting the datastore file [datastore1] 1c47c71a-65c7-4753-85e6-e0a6a93f08a1 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1470.725909] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30704012-efe3-4772-a305-ed0f28fd7691 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.731768] env[69994]: DEBUG oslo_vmware.api [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1470.731768] env[69994]: value = "task-2926731" [ 1470.731768] env[69994]: _type = "Task" [ 1470.731768] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.738813] env[69994]: DEBUG oslo_vmware.api [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926731, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.991432] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.707s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1470.996911] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.962s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1470.997106] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1470.997271] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1470.999845] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef16d27-9765-4834-ae9a-3f97e587e285 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.011547] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d18d1a1-09a8-4fef-a379-0681ec5f2d0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.026611] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5e828c-343c-4df7-9bc9-6ec95bfcbf79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.032739] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d54de8e-c0f5-4c97-a30b-cd53f407de80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.061372] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180529MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1471.061506] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1471.061707] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1471.243107] env[69994]: DEBUG oslo_vmware.api [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926731, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134387} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.243306] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1471.243485] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1471.243661] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1471.243834] env[69994]: INFO nova.compute.manager [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1471.244084] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1471.244320] env[69994]: DEBUG nova.compute.manager [-] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1471.244413] env[69994]: DEBUG nova.network.neutron [-] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1471.759291] env[69994]: DEBUG nova.compute.manager [req-03f9352f-8633-4362-add7-b4cfede91f75 req-8bcf3f00-96cd-416d-abb2-60af86dec03a service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Received event network-vif-deleted-54fdc013-6818-4f6e-8b1e-a5b46c4879fb {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1471.759291] env[69994]: INFO nova.compute.manager [req-03f9352f-8633-4362-add7-b4cfede91f75 req-8bcf3f00-96cd-416d-abb2-60af86dec03a service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Neutron deleted interface 54fdc013-6818-4f6e-8b1e-a5b46c4879fb; detaching it from the instance and deleting it from the info cache [ 1471.759291] env[69994]: DEBUG nova.network.neutron [req-03f9352f-8633-4362-add7-b4cfede91f75 req-8bcf3f00-96cd-416d-abb2-60af86dec03a service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.088313] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance aefb7903-afd1-4574-bec1-adab769728b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1472.088467] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 27d2bf57-80ec-4bc4-b87b-560f7dfd6524 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1472.088592] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 1c47c71a-65c7-4753-85e6-e0a6a93f08a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1472.241819] env[69994]: DEBUG nova.network.neutron [-] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.262239] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e947e531-b5b3-45f0-80cd-3a84ba9a3c5c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.272303] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18f3678-d61a-4322-83b7-ab94d98c23fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.298086] env[69994]: DEBUG nova.compute.manager [req-03f9352f-8633-4362-add7-b4cfede91f75 req-8bcf3f00-96cd-416d-abb2-60af86dec03a service nova] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Detach interface failed, port_id=54fdc013-6818-4f6e-8b1e-a5b46c4879fb, reason: Instance 1c47c71a-65c7-4753-85e6-e0a6a93f08a1 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1472.529752] env[69994]: INFO nova.compute.manager [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Swapping old allocation on dict_keys(['2173cd1f-90eb-4aab-b51d-83c140d1a7be']) held by migration 6e129a88-ce81-4837-993d-d273409d4e38 for instance [ 1472.552747] env[69994]: DEBUG nova.scheduler.client.report [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Overwriting current allocation {'allocations': {'2173cd1f-90eb-4aab-b51d-83c140d1a7be': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 191}}, 'project_id': '2417f6585042417c95491eb3d7cba343', 'user_id': '56900b2a71cc423a868f3c1b81f70172', 'consumer_generation': 1} on consumer 2f581516-f617-4650-bbbc-97feafa183f6 {{(pid=69994) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1472.591519] env[69994]: INFO nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 6e129a88-ce81-4837-993d-d273409d4e38 has allocations against this compute host but is not found in the database. [ 1472.591650] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 2f581516-f617-4650-bbbc-97feafa183f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1472.591896] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1472.591990] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1344MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1472.636679] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1472.636875] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1472.637066] env[69994]: DEBUG nova.network.neutron [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1472.653449] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85b8934-be0f-40a8-bbe4-88eda2c18404 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.660541] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8d58a2-a381-44eb-bafb-81db42b04357 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.692895] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e282d9d-19a8-46b3-b009-024ca96a1557 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.699997] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064f488a-1523-4762-b52e-6072573de0f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.713009] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1472.747530] env[69994]: INFO nova.compute.manager [-] [instance: 1c47c71a-65c7-4753-85e6-e0a6a93f08a1] Took 1.50 seconds to deallocate network for instance. 
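For readers following the ReconfigVM_Task / PowerOffVM_Task / DeleteDatastoreFile_Task entries in this stretch of the log, the repeated "Waiting for the task" / "progress is N%" / "completed successfully" lines all come from the same poll-until-done pattern: the driver submits a vCenter task, then repeatedly fetches its state until it reports success. The sketch below is a minimal, self-contained model of that polling loop only; FakeTask, its poll() method, and the poll interval are invented stand-ins for illustration and are not the real oslo.vmware or vSphere API.

```python
# Illustrative sketch only: a simplified model of the task-polling pattern seen in
# the surrounding wait_for_task / _poll_task log entries. FakeTask is hypothetical;
# a real driver would query vCenter for TaskInfo instead.
import time
from dataclasses import dataclass


@dataclass
class FakeTask:
    """Stand-in for a vCenter task handle (hypothetical, for illustration)."""
    task_id: str
    name: str
    _progress: int = 0

    def poll(self) -> dict:
        # Each poll advances the fake task; a real implementation would read
        # the task's progress and state from the server here.
        self._progress = min(100, self._progress + 50)
        state = "success" if self._progress == 100 else "running"
        return {"id": self.task_id, "name": self.name,
                "progress": self._progress, "state": state}


def wait_for_task(task: FakeTask, interval: float = 0.5) -> dict:
    """Block until the task reports success, logging progress like the entries above."""
    while True:
        info = task.poll()
        if info["state"] == "success":
            print(f"Task: {{'id': {info['id']!r}, 'name': {info['name']!r}}} "
                  "completed successfully.")
            return info
        print(f"Task: {{'id': {info['id']!r}, 'name': {info['name']!r}}} "
              f"progress is {info['progress']}%.")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("task-2926727", "ReconfigVM_Task"))
```

In the log itself this loop is what produces the pairs of "progress is 0%/5%" lines followed by a "duration_secs" completion line for each task id (task-2926727 through task-2926733 and beyond).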
[ 1473.219905] env[69994]: DEBUG nova.scheduler.client.report [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1473.253725] env[69994]: DEBUG oslo_concurrency.lockutils [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1473.329099] env[69994]: DEBUG nova.network.neutron [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance_info_cache with network_info: [{"id": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "address": "fa:16:3e:1d:69:25", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a7cf72f-19", "ovs_interfaceid": "4a7cf72f-19f6-46a2-87cc-d8f019aec72d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1473.725186] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1473.725455] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.664s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1473.725749] env[69994]: DEBUG oslo_concurrency.lockutils [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 
tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.472s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1473.725975] env[69994]: DEBUG nova.objects.instance [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lazy-loading 'resources' on Instance uuid 1c47c71a-65c7-4753-85e6-e0a6a93f08a1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1473.727474] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.727620] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Cleaning up deleted instances {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1473.832251] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-2f581516-f617-4650-bbbc-97feafa183f6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1473.832688] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1473.832966] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79a76782-2ee0-4ded-8543-f0dd0ad2e4d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.840138] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1473.840138] env[69994]: value = "task-2926732" [ 1473.840138] env[69994]: _type = "Task" [ 1473.840138] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.848238] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926732, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.236821] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] There are 21 instances to clean {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1474.237209] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: ac72ed6c-15f7-47e3-83a0-abcd85bba128] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1474.291335] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2300a5cb-31d0-4ec9-921b-b9ee24a21eac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.299053] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc598b33-844d-49d3-9fb6-5f1ba13153b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.329081] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6548cd-990f-4237-8fda-c998f513cc02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.335951] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7240b629-e811-4463-9c64-c89dbc273381 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.350751] env[69994]: DEBUG nova.compute.provider_tree [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1474.356328] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926732, 'name': PowerOffVM_Task, 'duration_secs': 0.223025} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.356572] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1474.357197] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1474.357413] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1474.357570] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1474.357794] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1474.357962] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1474.358133] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1474.358338] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1474.358496] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 
tempest-ServerActionsTestJSON-219624784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1474.358658] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1474.358815] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1474.358984] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1474.364743] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e417a7b1-fd5b-4ac4-adc1-37cfa25559ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.379650] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1474.379650] env[69994]: value = "task-2926733" [ 1474.379650] env[69994]: _type = "Task" [ 1474.379650] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.386885] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926733, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.739927] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: ba823cb8-570b-465f-a566-524b82ebc1ba] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1474.853936] env[69994]: DEBUG nova.scheduler.client.report [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1474.889812] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926733, 'name': ReconfigVM_Task, 'duration_secs': 0.126175} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.890566] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b1d0b4-15d5-4e28-a218-d1e28811ed17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.909782] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1474.910028] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1474.910193] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1474.910379] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor pref 0:0:0 
{{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1474.910527] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1474.910673] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1474.910918] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1474.911105] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1474.911277] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1474.911439] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1474.911607] env[69994]: DEBUG nova.virt.hardware [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1474.912368] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c0d1f63-1a3d-4282-8e6e-03323a725418 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.917747] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1474.917747] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525fd253-42ea-72b2-b05b-48a58c47965b" [ 1474.917747] env[69994]: _type = "Task" [ 1474.917747] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.925097] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525fd253-42ea-72b2-b05b-48a58c47965b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.243421] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 328868f0-2fe9-4c04-a669-54b073c53b14] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1475.358657] env[69994]: DEBUG oslo_concurrency.lockutils [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.633s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1475.380974] env[69994]: INFO nova.scheduler.client.report [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Deleted allocations for instance 1c47c71a-65c7-4753-85e6-e0a6a93f08a1 [ 1475.427860] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]525fd253-42ea-72b2-b05b-48a58c47965b, 'name': SearchDatastore_Task, 'duration_secs': 0.00919} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.433498] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Reconfiguring VM instance instance-0000007e to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1475.434544] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea219aaf-b14c-43c1-bf5b-7af6a3c5829d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.451545] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1475.451545] env[69994]: value = "task-2926734" [ 1475.451545] env[69994]: _type = "Task" [ 1475.451545] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.458957] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926734, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.747136] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: ead967bc-ba1d-4c3c-8dbb-e284b444ffcd] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1475.889595] env[69994]: DEBUG oslo_concurrency.lockutils [None req-314bd73a-fef5-4928-93af-2267a1141224 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "1c47c71a-65c7-4753-85e6-e0a6a93f08a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.256s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1475.961167] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926734, 'name': ReconfigVM_Task, 'duration_secs': 0.17937} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.961400] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Reconfigured VM instance instance-0000007e to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1475.962183] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c515bb-7964-4d97-92f7-ff11855eb22c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.984021] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] 2f581516-f617-4650-bbbc-97feafa183f6/2f581516-f617-4650-bbbc-97feafa183f6.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1475.984155] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e237efff-acaf-493c-a749-637ff88211f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.001139] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1476.001139] env[69994]: value = "task-2926735" [ 1476.001139] env[69994]: _type = "Task" [ 1476.001139] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.009031] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926735, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.250567] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 94169894-f772-41c9-95a1-ddf622f2c9f6] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1476.510911] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926735, 'name': ReconfigVM_Task, 'duration_secs': 0.313377} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.511198] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Reconfigured VM instance instance-0000007e to attach disk [datastore2] 2f581516-f617-4650-bbbc-97feafa183f6/2f581516-f617-4650-bbbc-97feafa183f6.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1476.511968] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5837a283-c3ef-4eb0-8e6c-97f853e2d952 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.530669] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f792440d-d2b1-4502-a669-cb9d281dea2c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.550941] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f09184c-dc24-416c-a368-7cd17d08b0fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.568388] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed9288a-7a68-4c37-8a55-cbe2bf831044 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.574600] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1476.574820] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c35ab4d-7985-439e-9ffd-5fd647c5a650 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.581413] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1476.581413] env[69994]: value = "task-2926737" [ 1476.581413] env[69994]: _type = "Task" [ 1476.581413] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.597356] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926737, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.754202] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 966e3672-f85b-467d-8821-1e14533ee629] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1477.091620] env[69994]: DEBUG oslo_vmware.api [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926737, 'name': PowerOnVM_Task, 'duration_secs': 0.348608} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.091887] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1477.258218] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 5784a102-fd07-4717-a88b-ac94ad578af6] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1477.761295] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 2ae41965-d345-4358-92bc-7e43d81aca50] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1478.103324] env[69994]: INFO nova.compute.manager [None req-7f7c6e4d-6d5b-401e-9572-38907ff0e18d tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance to original state: 'active' [ 1478.265210] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 9d915860-6789-4574-b30f-a7998c07b53e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1478.769144] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 1ec891c7-b196-47d8-9d0a-0f4d3f3cf51e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1479.257789] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "2f581516-f617-4650-bbbc-97feafa183f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1479.258061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 
tempest-ServerActionsTestJSON-219624784-project-member] Lock "2f581516-f617-4650-bbbc-97feafa183f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1479.258277] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "2f581516-f617-4650-bbbc-97feafa183f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1479.258457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "2f581516-f617-4650-bbbc-97feafa183f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1479.258622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "2f581516-f617-4650-bbbc-97feafa183f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1479.260958] env[69994]: INFO nova.compute.manager [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Terminating instance [ 1479.271008] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 7963eb9f-66a1-417b-928b-3b5cef7847be] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1479.764713] env[69994]: DEBUG nova.compute.manager [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1479.764900] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1479.765882] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d89efb5-02c9-4820-91d0-1eda5c652575 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.773728] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: fe716314-1b5d-4b05-b34d-dfd444ed0c8d] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1479.775514] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1479.775917] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd9bf7fa-9427-46d1-876e-08785c173f9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.781874] env[69994]: DEBUG oslo_vmware.api [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1479.781874] env[69994]: value = "task-2926738" [ 1479.781874] env[69994]: _type = "Task" [ 1479.781874] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.789838] env[69994]: DEBUG oslo_vmware.api [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926738, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.276930] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: a828caf9-2b61-4449-b1ee-25f0828380d1] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1480.295440] env[69994]: DEBUG oslo_vmware.api [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926738, 'name': PowerOffVM_Task, 'duration_secs': 0.202566} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.295810] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1480.296070] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1480.296694] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4c71feb-f2d7-4495-bb92-e1c0d1260394 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.372631] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1480.372867] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1480.373066] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleting the datastore file [datastore2] 2f581516-f617-4650-bbbc-97feafa183f6 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1480.373344] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43aa548c-7604-4aab-b763-1d623988f999 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.380251] env[69994]: DEBUG oslo_vmware.api [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1480.380251] env[69994]: value = "task-2926740" [ 1480.380251] env[69994]: _type = "Task" [ 1480.380251] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.388201] env[69994]: DEBUG oslo_vmware.api [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926740, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.776979] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "27d2bf57-80ec-4bc4-b87b-560f7dfd6524" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1480.777278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "27d2bf57-80ec-4bc4-b87b-560f7dfd6524" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1480.777506] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "27d2bf57-80ec-4bc4-b87b-560f7dfd6524-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1480.777688] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "27d2bf57-80ec-4bc4-b87b-560f7dfd6524-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1480.777856] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "27d2bf57-80ec-4bc4-b87b-560f7dfd6524-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1480.779950] env[69994]: INFO nova.compute.manager [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Terminating instance [ 1480.781321] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 395a4d39-29ae-4443-949f-4737e7e2341e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1480.890141] env[69994]: DEBUG oslo_vmware.api [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926740, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14393} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.890389] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1480.890569] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1480.890742] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1480.890913] env[69994]: INFO nova.compute.manager [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1480.891167] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1480.891356] env[69994]: DEBUG nova.compute.manager [-] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1480.891451] env[69994]: DEBUG nova.network.neutron [-] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1481.284936] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 87c5b8e4-166c-44b9-a179-1afaef751434] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1481.287189] env[69994]: DEBUG nova.compute.manager [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1481.287391] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1481.288427] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50dce2a8-5fe4-41a8-a4ae-1c2ba6fce794 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.295942] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1481.296616] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3602e0b5-3f53-48dc-9e57-3ec76fb1555e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.302818] env[69994]: DEBUG oslo_vmware.api [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1481.302818] env[69994]: value = "task-2926741" [ 1481.302818] env[69994]: _type = "Task" [ 1481.302818] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.310855] env[69994]: DEBUG oslo_vmware.api [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926741, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.335058] env[69994]: DEBUG nova.compute.manager [req-0d96a264-c7a4-4e66-b941-5892f7d5cc01 req-0bba9e5f-f197-4fc1-a66e-ab987070949a service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Received event network-vif-deleted-4a7cf72f-19f6-46a2-87cc-d8f019aec72d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1481.335277] env[69994]: INFO nova.compute.manager [req-0d96a264-c7a4-4e66-b941-5892f7d5cc01 req-0bba9e5f-f197-4fc1-a66e-ab987070949a service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Neutron deleted interface 4a7cf72f-19f6-46a2-87cc-d8f019aec72d; detaching it from the instance and deleting it from the info cache [ 1481.335509] env[69994]: DEBUG nova.network.neutron [req-0d96a264-c7a4-4e66-b941-5892f7d5cc01 req-0bba9e5f-f197-4fc1-a66e-ab987070949a service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.789475] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 29ea539a-d8f4-487b-b5e7-1f15534272f9] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1481.812278] env[69994]: DEBUG oslo_vmware.api [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926741, 'name': PowerOffVM_Task, 'duration_secs': 0.214448} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.812545] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1481.812714] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1481.812960] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10ea1df5-2d2f-4904-8714-28e7875043d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.818294] env[69994]: DEBUG nova.network.neutron [-] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.837912] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c473a72-db49-4008-b757-22a86b0f7c0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.848630] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19286f75-9077-4a5b-a79c-0233cd45412a {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.874917] env[69994]: DEBUG nova.compute.manager [req-0d96a264-c7a4-4e66-b941-5892f7d5cc01 req-0bba9e5f-f197-4fc1-a66e-ab987070949a service nova] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Detach interface failed, port_id=4a7cf72f-19f6-46a2-87cc-d8f019aec72d, reason: Instance 2f581516-f617-4650-bbbc-97feafa183f6 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1481.876335] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1481.876536] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1481.876717] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Deleting the datastore file [datastore1] 27d2bf57-80ec-4bc4-b87b-560f7dfd6524 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1481.876964] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-10a852ea-4a13-4a69-9011-c6c062572303 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.882239] env[69994]: DEBUG oslo_vmware.api [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1481.882239] env[69994]: value = "task-2926743" [ 1481.882239] env[69994]: _type = "Task" [ 1481.882239] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.889776] env[69994]: DEBUG oslo_vmware.api [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926743, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.292950] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 60f6d502-0fef-4764-8c1f-1b1d5ab3db41] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1482.320823] env[69994]: INFO nova.compute.manager [-] [instance: 2f581516-f617-4650-bbbc-97feafa183f6] Took 1.43 seconds to deallocate network for instance. 
[ 1482.392096] env[69994]: DEBUG oslo_vmware.api [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926743, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169126} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.392387] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1482.392562] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1482.392733] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1482.392906] env[69994]: INFO nova.compute.manager [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1482.393164] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1482.393358] env[69994]: DEBUG nova.compute.manager [-] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1482.393455] env[69994]: DEBUG nova.network.neutron [-] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1482.796158] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 3c374550-d65b-494a-89d7-60720f6b44dc] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1482.827025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1482.827339] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1482.827528] env[69994]: DEBUG nova.objects.instance [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lazy-loading 'resources' on Instance uuid 2f581516-f617-4650-bbbc-97feafa183f6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1483.160160] env[69994]: DEBUG nova.network.neutron [-] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.299513] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 85293c91-f363-4085-9eb8-2bf6514fa2f1] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1483.360742] env[69994]: DEBUG nova.compute.manager [req-25bbbc34-8f9e-4a95-8658-ef0906811097 req-15cad15e-094a-4f87-9b90-32e573524cdb service nova] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Received event network-vif-deleted-30d5769c-ab0c-4501-b426-4747886e04e6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1483.383234] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7675bba-9cad-49ca-9f34-f8d70f13dd37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.391179] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766fab95-1e92-458c-8bc3-7cccd9ea2d8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.420406] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d181aa-d3e2-437d-9c3b-c79b6e647f40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.427230] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2addc7e3-fd02-40dc-a449-85b246a123cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.441255] env[69994]: DEBUG nova.compute.provider_tree [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1483.662282] env[69994]: INFO nova.compute.manager [-] [instance: 27d2bf57-80ec-4bc4-b87b-560f7dfd6524] Took 1.27 seconds to deallocate network for instance. [ 1483.802604] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 68eba44a-0989-47dc-a88b-102d9aa34c5d] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1483.943941] env[69994]: DEBUG nova.scheduler.client.report [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1484.168507] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1484.305687] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] [instance: 8001cb13-6a52-451b-b4b6-57b893975079] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1484.448693] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.621s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1484.450992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.283s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1484.451341] env[69994]: DEBUG nova.objects.instance [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lazy-loading 'resources' on Instance uuid 27d2bf57-80ec-4bc4-b87b-560f7dfd6524 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1484.469217] env[69994]: INFO nova.scheduler.client.report [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleted allocations for instance 2f581516-f617-4650-bbbc-97feafa183f6 [ 1484.808551] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1484.808733] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Cleaning up deleted instances with incomplete migration {{(pid=69994) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1484.975982] env[69994]: DEBUG oslo_concurrency.lockutils [None req-64f15093-c0c6-4807-b0db-810337245e15 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "2f581516-f617-4650-bbbc-97feafa183f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.718s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1484.989991] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3c1966-d7e8-4741-a874-6bc504c32fdd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.998450] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02fdeddd-b083-4865-b1a0-44ae34d4b1f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.029245] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566a9f0e-b285-4846-b390-80b8a79f1bc1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.036364] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b6ce86-51d4-4063-8d03-86b6ae64f4ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.049444] env[69994]: DEBUG nova.compute.provider_tree [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1485.310867] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
1485.552444] env[69994]: DEBUG nova.scheduler.client.report [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1486.057906] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.607s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1486.076083] env[69994]: INFO nova.scheduler.client.report [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Deleted allocations for instance 27d2bf57-80ec-4bc4-b87b-560f7dfd6524 [ 1486.292491] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1486.292740] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1486.582772] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c242b47d-69f0-4464-9fe6-fa6564933803 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "27d2bf57-80ec-4bc4-b87b-560f7dfd6524" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.805s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1486.794949] env[69994]: DEBUG nova.compute.manager [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1487.315784] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1487.316074] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1487.318902] env[69994]: INFO nova.compute.claims [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1487.423735] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "aefb7903-afd1-4574-bec1-adab769728b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1487.424020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "aefb7903-afd1-4574-bec1-adab769728b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1487.424236] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "aefb7903-afd1-4574-bec1-adab769728b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1487.424425] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "aefb7903-afd1-4574-bec1-adab769728b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1487.424597] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "aefb7903-afd1-4574-bec1-adab769728b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1487.426649] env[69994]: INFO nova.compute.manager [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Terminating instance [ 1487.813349] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1487.813623] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1487.930037] env[69994]: DEBUG nova.compute.manager [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1487.930240] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1487.931221] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33555c6-82d0-4732-a22a-085fc5c06f80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.938887] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1487.939124] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da6a1525-821d-4ccf-a372-4a0d0e9e10d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.944951] env[69994]: DEBUG oslo_vmware.api [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1487.944951] env[69994]: value = "task-2926744" [ 1487.944951] env[69994]: _type = "Task" [ 1487.944951] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.952794] env[69994]: DEBUG oslo_vmware.api [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926744, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.364940] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566c1ec5-7e75-459c-98ea-5a1c739c562a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.372638] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de70593d-63ca-417f-aedc-58c3de79dc8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.401799] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6715acf2-a887-4cfd-955e-fb6ed2297cb1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.409348] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66ac8ee-2472-4210-a6c6-453f215249b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.422128] env[69994]: DEBUG nova.compute.provider_tree [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.454972] env[69994]: DEBUG oslo_vmware.api [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926744, 'name': PowerOffVM_Task, 'duration_secs': 0.215524} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.455238] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1488.455406] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1488.455663] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3837b80-3938-42c5-809b-1a23a686e546 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.519728] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1488.520068] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1488.520346] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Deleting the datastore file [datastore1] aefb7903-afd1-4574-bec1-adab769728b5 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1488.520699] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-566052a4-5948-449d-be13-d85a4f43eac3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.527421] env[69994]: DEBUG oslo_vmware.api [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for the task: (returnval){ [ 1488.527421] env[69994]: value = "task-2926746" [ 1488.527421] env[69994]: _type = "Task" [ 1488.527421] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.537567] env[69994]: DEBUG oslo_vmware.api [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926746, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.925638] env[69994]: DEBUG nova.scheduler.client.report [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1489.037017] env[69994]: DEBUG oslo_vmware.api [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Task: {'id': task-2926746, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131656} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.037283] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1489.037468] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1489.037646] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1489.037822] env[69994]: INFO nova.compute.manager [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1489.038086] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1489.038278] env[69994]: DEBUG nova.compute.manager [-] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1489.038370] env[69994]: DEBUG nova.network.neutron [-] [instance: aefb7903-afd1-4574-bec1-adab769728b5] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1489.274228] env[69994]: DEBUG nova.compute.manager [req-f3467871-e445-406b-9bb9-ff483e4c1af5 req-73fe0754-ac49-4fa2-928e-27b9aa125708 service nova] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Received event network-vif-deleted-f7b1d9c4-f85a-4f93-a48a-87a59a84831b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1489.274228] env[69994]: INFO nova.compute.manager [req-f3467871-e445-406b-9bb9-ff483e4c1af5 req-73fe0754-ac49-4fa2-928e-27b9aa125708 service nova] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Neutron deleted interface f7b1d9c4-f85a-4f93-a48a-87a59a84831b; detaching it from the instance and deleting it from the info cache [ 1489.274665] env[69994]: DEBUG nova.network.neutron [req-f3467871-e445-406b-9bb9-ff483e4c1af5 req-73fe0754-ac49-4fa2-928e-27b9aa125708 service nova] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.440891] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.125s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1489.441464] env[69994]: DEBUG nova.compute.manager [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1489.755466] env[69994]: DEBUG nova.network.neutron [-] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.776663] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f74506fc-40f3-4208-b745-adf6abed409e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.787024] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17547d8b-d7cc-4693-bb27-7f91d8e1f286 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.809945] env[69994]: DEBUG nova.compute.manager [req-f3467871-e445-406b-9bb9-ff483e4c1af5 req-73fe0754-ac49-4fa2-928e-27b9aa125708 service nova] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Detach interface failed, port_id=f7b1d9c4-f85a-4f93-a48a-87a59a84831b, reason: Instance aefb7903-afd1-4574-bec1-adab769728b5 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1489.947037] env[69994]: DEBUG nova.compute.utils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1489.947699] env[69994]: DEBUG nova.compute.manager [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1489.947874] env[69994]: DEBUG nova.network.neutron [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1489.997594] env[69994]: DEBUG nova.policy [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56900b2a71cc423a868f3c1b81f70172', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2417f6585042417c95491eb3d7cba343', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1490.237171] env[69994]: DEBUG nova.network.neutron [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Successfully created port: 01459963-eeb3-41b6-8c27-c82b360c49cc {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1490.257896] env[69994]: INFO nova.compute.manager [-] [instance: aefb7903-afd1-4574-bec1-adab769728b5] Took 1.22 seconds to deallocate network for instance. [ 1490.450935] env[69994]: DEBUG nova.compute.manager [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1490.763668] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1490.763983] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1490.764226] env[69994]: DEBUG nova.objects.instance [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lazy-loading 'resources' on Instance uuid aefb7903-afd1-4574-bec1-adab769728b5 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1491.308505] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46a461c-a972-4182-a623-9646bd076961 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.316048] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af551a38-86a9-49fd-bfa3-2e33eff53360 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.345912] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ce47bb-2497-41b4-a372-0599407446a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.353451] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a4cda8-7112-49ff-8a2b-30817909a1c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.367304] env[69994]: DEBUG nova.compute.provider_tree [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1491.461630] env[69994]: DEBUG nova.compute.manager [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1491.489501] env[69994]: DEBUG nova.virt.hardware [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T12:28:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T12:28:30Z,direct_url=,disk_format='vmdk',id=f75f967d-5bd8-4c15-9a52-96f7e9dd9d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3930bfd42cfa44e8b0ca650284b8ab00',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T12:28:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1491.489761] env[69994]: DEBUG nova.virt.hardware [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1491.489917] env[69994]: DEBUG nova.virt.hardware [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1491.490122] env[69994]: DEBUG nova.virt.hardware [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1491.490270] env[69994]: DEBUG nova.virt.hardware [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1491.490418] env[69994]: DEBUG nova.virt.hardware [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1491.490627] env[69994]: DEBUG nova.virt.hardware [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1491.490789] env[69994]: DEBUG nova.virt.hardware [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1491.490954] env[69994]: DEBUG nova.virt.hardware [None 
req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1491.491140] env[69994]: DEBUG nova.virt.hardware [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1491.491315] env[69994]: DEBUG nova.virt.hardware [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1491.492189] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6c1c09-54f5-46e6-aec9-7a5a509096b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.499973] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2fad7e-e003-4213-bf55-a1b8ceedb542 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.580682] env[69994]: DEBUG nova.compute.manager [req-f8b1d99a-d316-4a78-a3e2-8f88bd5d5db3 req-8d589a08-a01a-4fa1-8ffd-8aedff5f2f8d service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Received event network-vif-plugged-01459963-eeb3-41b6-8c27-c82b360c49cc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1491.580952] env[69994]: DEBUG oslo_concurrency.lockutils [req-f8b1d99a-d316-4a78-a3e2-8f88bd5d5db3 req-8d589a08-a01a-4fa1-8ffd-8aedff5f2f8d service nova] Acquiring lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1491.581128] env[69994]: DEBUG oslo_concurrency.lockutils [req-f8b1d99a-d316-4a78-a3e2-8f88bd5d5db3 req-8d589a08-a01a-4fa1-8ffd-8aedff5f2f8d service nova] Lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1491.581296] env[69994]: DEBUG oslo_concurrency.lockutils [req-f8b1d99a-d316-4a78-a3e2-8f88bd5d5db3 req-8d589a08-a01a-4fa1-8ffd-8aedff5f2f8d service nova] Lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1491.581459] env[69994]: DEBUG nova.compute.manager [req-f8b1d99a-d316-4a78-a3e2-8f88bd5d5db3 req-8d589a08-a01a-4fa1-8ffd-8aedff5f2f8d service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] No waiting events found dispatching network-vif-plugged-01459963-eeb3-41b6-8c27-c82b360c49cc {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1491.581621] env[69994]: WARNING nova.compute.manager [req-f8b1d99a-d316-4a78-a3e2-8f88bd5d5db3 
req-8d589a08-a01a-4fa1-8ffd-8aedff5f2f8d service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Received unexpected event network-vif-plugged-01459963-eeb3-41b6-8c27-c82b360c49cc for instance with vm_state building and task_state spawning. [ 1491.658857] env[69994]: DEBUG nova.network.neutron [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Successfully updated port: 01459963-eeb3-41b6-8c27-c82b360c49cc {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1491.869934] env[69994]: DEBUG nova.scheduler.client.report [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1492.163944] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.164095] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1492.164197] env[69994]: DEBUG nova.network.neutron [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1492.374827] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.611s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1492.392544] env[69994]: INFO nova.scheduler.client.report [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Deleted allocations for instance aefb7903-afd1-4574-bec1-adab769728b5 [ 1492.705536] env[69994]: DEBUG nova.network.neutron [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1492.900418] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cdc1ad51-9162-4a35-9c1c-9ea5c84eab47 tempest-ServerRescueNegativeTestJSON-392299606 tempest-ServerRescueNegativeTestJSON-392299606-project-member] Lock "aefb7903-afd1-4574-bec1-adab769728b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.476s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1492.921598] env[69994]: DEBUG nova.network.neutron [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Updating instance_info_cache with network_info: [{"id": "01459963-eeb3-41b6-8c27-c82b360c49cc", "address": "fa:16:3e:2f:82:01", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01459963-ee", "ovs_interfaceid": "01459963-eeb3-41b6-8c27-c82b360c49cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.427017] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1493.427017] env[69994]: DEBUG nova.compute.manager [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Instance network_info: |[{"id": "01459963-eeb3-41b6-8c27-c82b360c49cc", "address": "fa:16:3e:2f:82:01", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01459963-ee", "ovs_interfaceid": "01459963-eeb3-41b6-8c27-c82b360c49cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1493.427017] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:82:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c5652322-9f10-4996-baed-4c0aa13a1b4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01459963-eeb3-41b6-8c27-c82b360c49cc', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1493.433944] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1493.434367] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1493.436603] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c595ace-9f34-41b7-b0bd-3f1a90b84322 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.456022] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1493.456022] env[69994]: value = "task-2926747" [ 1493.456022] env[69994]: _type = "Task" [ 1493.456022] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.463793] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926747, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.604090] env[69994]: DEBUG nova.compute.manager [req-11ed6fa9-f18c-4cd0-814b-0bdbea3704c7 req-ead3514a-0710-40b7-bbd9-b9e24ea62f31 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Received event network-changed-01459963-eeb3-41b6-8c27-c82b360c49cc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1493.604317] env[69994]: DEBUG nova.compute.manager [req-11ed6fa9-f18c-4cd0-814b-0bdbea3704c7 req-ead3514a-0710-40b7-bbd9-b9e24ea62f31 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Refreshing instance network info cache due to event network-changed-01459963-eeb3-41b6-8c27-c82b360c49cc. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1493.604650] env[69994]: DEBUG oslo_concurrency.lockutils [req-11ed6fa9-f18c-4cd0-814b-0bdbea3704c7 req-ead3514a-0710-40b7-bbd9-b9e24ea62f31 service nova] Acquiring lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.604903] env[69994]: DEBUG oslo_concurrency.lockutils [req-11ed6fa9-f18c-4cd0-814b-0bdbea3704c7 req-ead3514a-0710-40b7-bbd9-b9e24ea62f31 service nova] Acquired lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1493.605113] env[69994]: DEBUG nova.network.neutron [req-11ed6fa9-f18c-4cd0-814b-0bdbea3704c7 req-ead3514a-0710-40b7-bbd9-b9e24ea62f31 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Refreshing network info cache for port 01459963-eeb3-41b6-8c27-c82b360c49cc {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1493.966197] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-2926747, 'name': CreateVM_Task, 'duration_secs': 0.294584} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.966531] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1493.967280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.967465] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1493.967752] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1493.968018] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-073a075e-0c72-4df3-bee9-a84f1628c8f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.972797] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1493.972797] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d6e513-dda3-5255-1394-012451d0c217" [ 1493.972797] env[69994]: _type = "Task" [ 1493.972797] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.980640] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d6e513-dda3-5255-1394-012451d0c217, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.313896] env[69994]: DEBUG nova.network.neutron [req-11ed6fa9-f18c-4cd0-814b-0bdbea3704c7 req-ead3514a-0710-40b7-bbd9-b9e24ea62f31 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Updated VIF entry in instance network info cache for port 01459963-eeb3-41b6-8c27-c82b360c49cc. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1494.314272] env[69994]: DEBUG nova.network.neutron [req-11ed6fa9-f18c-4cd0-814b-0bdbea3704c7 req-ead3514a-0710-40b7-bbd9-b9e24ea62f31 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Updating instance_info_cache with network_info: [{"id": "01459963-eeb3-41b6-8c27-c82b360c49cc", "address": "fa:16:3e:2f:82:01", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01459963-ee", "ovs_interfaceid": "01459963-eeb3-41b6-8c27-c82b360c49cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.483107] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52d6e513-dda3-5255-1394-012451d0c217, 'name': SearchDatastore_Task, 'duration_secs': 0.013549} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.483459] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1494.483589] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Processing image f75f967d-5bd8-4c15-9a52-96f7e9dd9d48 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1494.483823] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.483967] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1494.484159] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1494.484411] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd28e16f-3ee4-4eae-98d3-520d293408bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.492560] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1494.492727] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1494.493406] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ee39065-10d4-4bf1-b25b-1c7585ee74be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.498152] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1494.498152] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ff4ff8-8416-2360-3417-02634e1d1c32" [ 1494.498152] env[69994]: _type = "Task" [ 1494.498152] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.505724] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ff4ff8-8416-2360-3417-02634e1d1c32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.817694] env[69994]: DEBUG oslo_concurrency.lockutils [req-11ed6fa9-f18c-4cd0-814b-0bdbea3704c7 req-ead3514a-0710-40b7-bbd9-b9e24ea62f31 service nova] Releasing lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1495.008636] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52ff4ff8-8416-2360-3417-02634e1d1c32, 'name': SearchDatastore_Task, 'duration_secs': 0.008587} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.009420] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8fce48f-fabe-48d8-8ff5-9cdf0d42ab31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.014301] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1495.014301] env[69994]: value = "session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e1cf37-439d-2050-b7ea-49acdfcff067" [ 1495.014301] env[69994]: _type = "Task" [ 1495.014301] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.021531] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e1cf37-439d-2050-b7ea-49acdfcff067, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.524816] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': session[522199a6-e6c3-3d75-4155-19a9e8eaff9c]52e1cf37-439d-2050-b7ea-49acdfcff067, 'name': SearchDatastore_Task, 'duration_secs': 0.009162} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.525113] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1495.525364] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9/8e76eaae-f46f-4bd2-8123-ee2ef96d34f9.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1495.525618] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-825f61d6-857a-444b-a86f-83c78581ed7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.531504] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1495.531504] env[69994]: value = "task-2926748" [ 1495.531504] env[69994]: _type = "Task" [ 1495.531504] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.538565] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926748, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.041852] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926748, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.418725} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.042091] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48/f75f967d-5bd8-4c15-9a52-96f7e9dd9d48.vmdk to [datastore1] 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9/8e76eaae-f46f-4bd2-8123-ee2ef96d34f9.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1496.042300] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1496.042545] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83b7d592-8043-444b-a5b4-2d611b212eb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.048900] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1496.048900] env[69994]: value = "task-2926749" [ 1496.048900] env[69994]: _type = "Task" [ 1496.048900] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.055746] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926749, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.558804] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926749, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070841} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.559145] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1496.559859] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3bc15c6-c9d5-44b1-a27d-61a3a17d5edf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.581156] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9/8e76eaae-f46f-4bd2-8123-ee2ef96d34f9.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1496.581410] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d3f5f2b-3043-4fba-ab55-77241b06fdd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.600188] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1496.600188] env[69994]: value = "task-2926750" [ 1496.600188] env[69994]: _type = "Task" [ 1496.600188] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.607447] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926750, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.109948] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926750, 'name': ReconfigVM_Task, 'duration_secs': 0.262844} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.110271] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Reconfigured VM instance instance-0000007f to attach disk [datastore1] 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9/8e76eaae-f46f-4bd2-8123-ee2ef96d34f9.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1497.110868] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a86cd8d4-f629-4453-b5e0-755c2fac6392 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.117345] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1497.117345] env[69994]: value = "task-2926751" [ 1497.117345] env[69994]: _type = "Task" [ 1497.117345] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.125731] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926751, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.627448] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926751, 'name': Rename_Task, 'duration_secs': 0.139679} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.627729] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1497.627969] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc57ba1c-86f7-4396-af04-7b07c6d5c7fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.634586] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1497.634586] env[69994]: value = "task-2926752" [ 1497.634586] env[69994]: _type = "Task" [ 1497.634586] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.642433] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926752, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.148041] env[69994]: DEBUG oslo_vmware.api [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926752, 'name': PowerOnVM_Task, 'duration_secs': 0.438245} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.148427] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1498.148716] env[69994]: INFO nova.compute.manager [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Took 6.69 seconds to spawn the instance on the hypervisor. [ 1498.148993] env[69994]: DEBUG nova.compute.manager [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1498.150135] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f4a85e-b21a-4614-869b-ba8f78ba1bed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.669891] env[69994]: INFO nova.compute.manager [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Took 11.37 seconds to build instance. [ 1499.172211] env[69994]: DEBUG oslo_concurrency.lockutils [None req-552dab32-fe21-4331-a9a0-b1e826350e89 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.879s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1499.287178] env[69994]: DEBUG nova.compute.manager [req-754c6ddf-5a76-4d13-a489-fef25d7238b8 req-ed72a6d0-4677-4707-a382-0648bc78aa27 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Received event network-changed-01459963-eeb3-41b6-8c27-c82b360c49cc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1499.287178] env[69994]: DEBUG nova.compute.manager [req-754c6ddf-5a76-4d13-a489-fef25d7238b8 req-ed72a6d0-4677-4707-a382-0648bc78aa27 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Refreshing instance network info cache due to event network-changed-01459963-eeb3-41b6-8c27-c82b360c49cc. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1499.287178] env[69994]: DEBUG oslo_concurrency.lockutils [req-754c6ddf-5a76-4d13-a489-fef25d7238b8 req-ed72a6d0-4677-4707-a382-0648bc78aa27 service nova] Acquiring lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.287520] env[69994]: DEBUG oslo_concurrency.lockutils [req-754c6ddf-5a76-4d13-a489-fef25d7238b8 req-ed72a6d0-4677-4707-a382-0648bc78aa27 service nova] Acquired lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1499.287520] env[69994]: DEBUG nova.network.neutron [req-754c6ddf-5a76-4d13-a489-fef25d7238b8 req-ed72a6d0-4677-4707-a382-0648bc78aa27 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Refreshing network info cache for port 01459963-eeb3-41b6-8c27-c82b360c49cc {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1499.991086] env[69994]: DEBUG nova.network.neutron [req-754c6ddf-5a76-4d13-a489-fef25d7238b8 req-ed72a6d0-4677-4707-a382-0648bc78aa27 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Updated VIF entry in instance network info cache for port 01459963-eeb3-41b6-8c27-c82b360c49cc. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1499.991506] env[69994]: DEBUG nova.network.neutron [req-754c6ddf-5a76-4d13-a489-fef25d7238b8 req-ed72a6d0-4677-4707-a382-0648bc78aa27 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Updating instance_info_cache with network_info: [{"id": "01459963-eeb3-41b6-8c27-c82b360c49cc", "address": "fa:16:3e:2f:82:01", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01459963-ee", "ovs_interfaceid": "01459963-eeb3-41b6-8c27-c82b360c49cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.494219] env[69994]: DEBUG oslo_concurrency.lockutils [req-754c6ddf-5a76-4d13-a489-fef25d7238b8 req-ed72a6d0-4677-4707-a382-0648bc78aa27 service nova] Releasing lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1527.531545] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic 
task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1527.531545] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1527.531545] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.532093] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.034618] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1529.034878] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1529.035076] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1529.035247] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1529.036194] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc516f84-3abd-4d21-b3a2-ae20f60baffe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.044413] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8b3ba1-b685-43a4-95a2-bb5b16c6d93a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.058759] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c721d46-3e8e-45ee-b7ef-601132f296a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.064749] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569c2b84-7de2-400e-9631-891ac77637d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.093960] env[69994]: DEBUG 
nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180529MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1529.094095] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1529.094299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1530.118093] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Instance 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.118382] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1530.118441] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1530.145173] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb3ac3a-252e-4cea-8dad-1f768e87da32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.153009] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16264619-efc7-4ecc-ae01-986c008807ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.829755] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69aaa2d3-0e98-44b0-915e-ff4859daf629 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.837551] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a1b616-5ebf-48aa-8f16-c9e0758e2b30 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.850574] env[69994]: DEBUG nova.compute.provider_tree [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1531.354131] env[69994]: DEBUG nova.scheduler.client.report [None 
req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1531.859227] env[69994]: DEBUG nova.compute.resource_tracker [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1531.859457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.765s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1533.860133] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1533.860542] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.365617] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.365814] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.365959] env[69994]: DEBUG oslo_service.periodic_task [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.366135] env[69994]: DEBUG nova.compute.manager [None req-d713d77c-62f9-4f27-b687-e7a790bb1e81 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1536.793057] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5016efdd-5ca0-4199-a9c0-2dd858c010cf tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1536.793431] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5016efdd-5ca0-4199-a9c0-2dd858c010cf tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1536.793556] env[69994]: DEBUG nova.compute.manager [None req-5016efdd-5ca0-4199-a9c0-2dd858c010cf tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1536.794448] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5af589-4baa-4606-88ff-964e8bf9eb81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.801567] env[69994]: DEBUG nova.compute.manager [None req-5016efdd-5ca0-4199-a9c0-2dd858c010cf tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1536.802151] env[69994]: DEBUG nova.objects.instance [None req-5016efdd-5ca0-4199-a9c0-2dd858c010cf tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lazy-loading 'flavor' on Instance uuid 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1537.810638] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5016efdd-5ca0-4199-a9c0-2dd858c010cf tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1537.811041] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a096f4d6-0aca-4e95-9021-4b8785e4a4ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.818194] env[69994]: DEBUG oslo_vmware.api [None req-5016efdd-5ca0-4199-a9c0-2dd858c010cf tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1537.818194] env[69994]: value = "task-2926753" [ 1537.818194] env[69994]: _type = "Task" [ 1537.818194] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.827077] env[69994]: DEBUG oslo_vmware.api [None req-5016efdd-5ca0-4199-a9c0-2dd858c010cf tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926753, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.328031] env[69994]: DEBUG oslo_vmware.api [None req-5016efdd-5ca0-4199-a9c0-2dd858c010cf tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926753, 'name': PowerOffVM_Task, 'duration_secs': 0.16654} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.328317] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5016efdd-5ca0-4199-a9c0-2dd858c010cf tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1538.328515] env[69994]: DEBUG nova.compute.manager [None req-5016efdd-5ca0-4199-a9c0-2dd858c010cf tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1538.329297] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0bcf1f-6bfd-4655-9904-0e442cc425ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.839895] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5016efdd-5ca0-4199-a9c0-2dd858c010cf tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.046s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1539.371103] env[69994]: DEBUG nova.objects.instance [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lazy-loading 'flavor' on Instance uuid 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1539.875927] env[69994]: DEBUG oslo_concurrency.lockutils [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.876351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1539.876351] env[69994]: DEBUG nova.network.neutron [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 
tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1539.876551] env[69994]: DEBUG nova.objects.instance [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lazy-loading 'info_cache' on Instance uuid 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1540.380468] env[69994]: DEBUG nova.objects.base [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Object Instance<8e76eaae-f46f-4bd2-8123-ee2ef96d34f9> lazy-loaded attributes: flavor,info_cache {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1541.107695] env[69994]: DEBUG nova.network.neutron [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Updating instance_info_cache with network_info: [{"id": "01459963-eeb3-41b6-8c27-c82b360c49cc", "address": "fa:16:3e:2f:82:01", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01459963-ee", "ovs_interfaceid": "01459963-eeb3-41b6-8c27-c82b360c49cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.610750] env[69994]: DEBUG oslo_concurrency.lockutils [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1542.616816] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1542.617157] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-585e0abc-cad5-4878-9972-d62e2c2123ac {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.624180] env[69994]: DEBUG oslo_vmware.api [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1542.624180] env[69994]: value = "task-2926754" [ 1542.624180] env[69994]: _type = "Task" [ 1542.624180] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.631361] env[69994]: DEBUG oslo_vmware.api [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926754, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.133871] env[69994]: DEBUG oslo_vmware.api [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926754, 'name': PowerOnVM_Task, 'duration_secs': 0.37717} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.134158] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1543.134364] env[69994]: DEBUG nova.compute.manager [None req-832891cb-aa89-4746-8798-953dc3ed0fdb tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1543.135108] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b6c9bd-577d-4f36-9856-62591658b73b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.101183] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f28126-7afa-4637-877f-85c6f6c15470 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.107768] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9580af9b-f5fe-47b5-8d10-c110731273e2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Suspending the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1544.107993] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-91f1f081-ff3d-4524-89a5-f981992f1741 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.114163] env[69994]: DEBUG oslo_vmware.api [None req-9580af9b-f5fe-47b5-8d10-c110731273e2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1544.114163] env[69994]: value = "task-2926755" [ 1544.114163] env[69994]: _type = "Task" [ 1544.114163] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.121275] env[69994]: DEBUG oslo_vmware.api [None req-9580af9b-f5fe-47b5-8d10-c110731273e2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926755, 'name': SuspendVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.624135] env[69994]: DEBUG oslo_vmware.api [None req-9580af9b-f5fe-47b5-8d10-c110731273e2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926755, 'name': SuspendVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.124967] env[69994]: DEBUG oslo_vmware.api [None req-9580af9b-f5fe-47b5-8d10-c110731273e2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926755, 'name': SuspendVM_Task, 'duration_secs': 0.543914} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.125252] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9580af9b-f5fe-47b5-8d10-c110731273e2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Suspended the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1545.125454] env[69994]: DEBUG nova.compute.manager [None req-9580af9b-f5fe-47b5-8d10-c110731273e2 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1545.126188] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08807b02-5d56-4cdd-917d-2353d2869055 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.420368] env[69994]: INFO nova.compute.manager [None req-25563544-3199-44d5-8f71-cfa0e13dcd19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Resuming [ 1546.420988] env[69994]: DEBUG nova.objects.instance [None req-25563544-3199-44d5-8f71-cfa0e13dcd19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lazy-loading 'flavor' on Instance uuid 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1547.930894] env[69994]: DEBUG oslo_concurrency.lockutils [None req-25563544-3199-44d5-8f71-cfa0e13dcd19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.931343] env[69994]: DEBUG oslo_concurrency.lockutils [None req-25563544-3199-44d5-8f71-cfa0e13dcd19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquired lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1547.931399] env[69994]: 
DEBUG nova.network.neutron [None req-25563544-3199-44d5-8f71-cfa0e13dcd19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1548.630713] env[69994]: DEBUG nova.network.neutron [None req-25563544-3199-44d5-8f71-cfa0e13dcd19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Updating instance_info_cache with network_info: [{"id": "01459963-eeb3-41b6-8c27-c82b360c49cc", "address": "fa:16:3e:2f:82:01", "network": {"id": "504be647-e149-4881-af67-c3ac688904ad", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1788632194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2417f6585042417c95491eb3d7cba343", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01459963-ee", "ovs_interfaceid": "01459963-eeb3-41b6-8c27-c82b360c49cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.133325] env[69994]: DEBUG oslo_concurrency.lockutils [None req-25563544-3199-44d5-8f71-cfa0e13dcd19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Releasing lock "refresh_cache-8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1549.134313] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2824fc18-d033-46f9-bb03-10fcc91ae158 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.140825] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-25563544-3199-44d5-8f71-cfa0e13dcd19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Resuming the VM {{(pid=69994) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1549.141035] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca9d597c-4df5-4361-ab7c-8c716402525d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.146811] env[69994]: DEBUG oslo_vmware.api [None req-25563544-3199-44d5-8f71-cfa0e13dcd19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1549.146811] env[69994]: value = "task-2926756" [ 1549.146811] env[69994]: _type = 
"Task" [ 1549.146811] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.154755] env[69994]: DEBUG oslo_vmware.api [None req-25563544-3199-44d5-8f71-cfa0e13dcd19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926756, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.659348] env[69994]: DEBUG oslo_vmware.api [None req-25563544-3199-44d5-8f71-cfa0e13dcd19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926756, 'name': PowerOnVM_Task, 'duration_secs': 0.497746} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.659663] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-25563544-3199-44d5-8f71-cfa0e13dcd19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Resumed the VM {{(pid=69994) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1549.659832] env[69994]: DEBUG nova.compute.manager [None req-25563544-3199-44d5-8f71-cfa0e13dcd19 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1549.660820] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86cb36a-1197-4fa2-9abf-7c17433d3920 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.077960] env[69994]: DEBUG oslo_concurrency.lockutils [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1551.077960] env[69994]: DEBUG oslo_concurrency.lockutils [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1551.078518] env[69994]: DEBUG oslo_concurrency.lockutils [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1551.078518] env[69994]: DEBUG oslo_concurrency.lockutils [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1551.078518] env[69994]: DEBUG oslo_concurrency.lockutils [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1551.080789] env[69994]: INFO nova.compute.manager [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Terminating instance [ 1551.584719] env[69994]: DEBUG nova.compute.manager [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1551.584947] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1551.585878] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb18003-065e-444c-97a1-55e5950b7f09 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.593622] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1551.593848] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d58da24-ed75-45d8-acfd-a438b8fced72 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.599686] env[69994]: DEBUG oslo_vmware.api [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1551.599686] env[69994]: value = "task-2926757" [ 1551.599686] env[69994]: _type = "Task" [ 1551.599686] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.607088] env[69994]: DEBUG oslo_vmware.api [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926757, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.109707] env[69994]: DEBUG oslo_vmware.api [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926757, 'name': PowerOffVM_Task, 'duration_secs': 0.201282} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.110072] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1552.110143] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1552.110378] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f452429-0a27-4f40-8faa-7b6641bf5373 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.172828] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1552.173013] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1552.173197] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleting the datastore file [datastore1] 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1552.173490] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac33994b-5e3e-4a1f-8810-12126a5173c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.179974] env[69994]: DEBUG oslo_vmware.api [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for the task: (returnval){ [ 1552.179974] env[69994]: value = "task-2926759" [ 1552.179974] env[69994]: _type = "Task" [ 1552.179974] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.187127] env[69994]: DEBUG oslo_vmware.api [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926759, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.689430] env[69994]: DEBUG oslo_vmware.api [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Task: {'id': task-2926759, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150217} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.689706] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1552.689907] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1552.690118] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1552.690296] env[69994]: INFO nova.compute.manager [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1552.690530] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1552.690722] env[69994]: DEBUG nova.compute.manager [-] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1552.690817] env[69994]: DEBUG nova.network.neutron [-] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1553.156296] env[69994]: DEBUG nova.compute.manager [req-f9430f34-4f46-4367-86ee-150d93d55467 req-5f2d8e54-93bf-4c72-9b4a-50f84d6619e2 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Received event network-vif-deleted-01459963-eeb3-41b6-8c27-c82b360c49cc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1553.156296] env[69994]: INFO nova.compute.manager [req-f9430f34-4f46-4367-86ee-150d93d55467 req-5f2d8e54-93bf-4c72-9b4a-50f84d6619e2 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Neutron deleted interface 01459963-eeb3-41b6-8c27-c82b360c49cc; detaching it from the instance and deleting it from the info cache [ 1553.156296] env[69994]: DEBUG nova.network.neutron [req-f9430f34-4f46-4367-86ee-150d93d55467 req-5f2d8e54-93bf-4c72-9b4a-50f84d6619e2 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1553.611425] env[69994]: DEBUG nova.network.neutron [-] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1553.659032] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c12d8b1a-70cf-449e-867d-8b5d23a573cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.668774] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd63c296-a18b-4bd2-a63f-f92284a6a873 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.691594] env[69994]: DEBUG nova.compute.manager [req-f9430f34-4f46-4367-86ee-150d93d55467 req-5f2d8e54-93bf-4c72-9b4a-50f84d6619e2 service nova] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Detach interface failed, port_id=01459963-eeb3-41b6-8c27-c82b360c49cc, reason: Instance 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1554.114035] env[69994]: INFO nova.compute.manager [-] [instance: 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9] Took 1.42 seconds to deallocate network for instance. 
[ 1554.620287] env[69994]: DEBUG oslo_concurrency.lockutils [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1554.620693] env[69994]: DEBUG oslo_concurrency.lockutils [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1554.620796] env[69994]: DEBUG nova.objects.instance [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lazy-loading 'resources' on Instance uuid 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1555.154671] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bea6749-2fcc-4879-a23e-16998a9737f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.162751] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a5369f-c4ba-4cee-ac8f-e20c9392a43b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.191796] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0280f23-702c-4d32-a4fa-0c3858e352d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.198272] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664cfd08-b38c-4ed5-9ff2-2ef3d1cbed95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.210809] env[69994]: DEBUG nova.compute.provider_tree [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed in ProviderTree for provider: 2173cd1f-90eb-4aab-b51d-83c140d1a7be {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1555.714302] env[69994]: DEBUG nova.scheduler.client.report [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Inventory has not changed for provider 2173cd1f-90eb-4aab-b51d-83c140d1a7be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1556.220075] env[69994]: DEBUG oslo_concurrency.lockutils [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 
tempest-ServerActionsTestJSON-219624784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.599s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1556.241308] env[69994]: INFO nova.scheduler.client.report [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Deleted allocations for instance 8e76eaae-f46f-4bd2-8123-ee2ef96d34f9 [ 1556.749434] env[69994]: DEBUG oslo_concurrency.lockutils [None req-908b34b7-b0f6-4c41-ab9a-188bf6de5910 tempest-ServerActionsTestJSON-219624784 tempest-ServerActionsTestJSON-219624784-project-member] Lock "8e76eaae-f46f-4bd2-8123-ee2ef96d34f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.671s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}